author     miwi <miwi@FreeBSD.org>  2008-09-08 08:14:06 +0800
committer  miwi <miwi@FreeBSD.org>  2008-09-08 08:14:06 +0800
commit     84a9a33ae404fa0929eb791b146e68a9c778a3eb (patch)
tree       c085994c39fb54f745e9ccb25739321f7c92a3ea /lang
parent     558286f25ed9b06d18bfe25caee04dd2747afa3e (diff)
- Security fixes
Multiple vulnerabilities:
 1) Various integer overflow errors exist in core modules, e.g. stringobject,
    unicodeobject, bufferobject, longobject, tupleobject, stropmodule,
    gcmodule, and mmapmodule.
 2) An integer overflow in the hashlib module can lead to unreliable
    cryptographic digest results.
 3) Integer overflow errors in the processing of unicode strings can be
    exploited to cause buffer overflows on 32-bit systems.
 4) An integer overflow exists in the PyOS_vsnprintf() function on
    architectures that do not have a "vsnprintf()" function.
 5) An integer underflow error in the PyOS_vsnprintf() function when passing
    zero-length strings can lead to memory corruption.

PR:             127172 (based on)
Submitted by:   bf <bf2006a@yahoo.com>
Obtained from:  python svn
Security:       CVE-2008-2315, CVE-2008-2316, CVE-2008-3142, CVE-2008-3144,
                CVE-2008-3143 (vuxml entry to follow)
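
The patches below share a common pattern: a requested size is checked against
the largest value a signed Py_ssize_t can hold before it is multiplied or
passed to an allocator, so an attacker-controlled length cannot wrap around
into a small allocation followed by an out-of-bounds write. The following is a
minimal standalone C sketch of that guard, not code taken from the patches;
checked_alloc() and SSIZE_T_MAX_STANDIN are illustrative names only.

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    /* Stand-in for Python's PY_SSIZE_T_MAX: the largest value a signed
     * Py_ssize_t-style index can represent. */
    #define SSIZE_T_MAX_STANDIN ((size_t)(SIZE_MAX / 2))

    /* Hypothetical helper mirroring the guard the patches add: reject any
     * request whose element count would overflow the total byte size, and
     * return NULL so the caller can report a memory error. */
    static void *checked_alloc(size_t nelem, size_t elsize)
    {
        if (elsize != 0 && nelem > SSIZE_T_MAX_STANDIN / elsize)
            return NULL;                      /* would overflow: refuse */
        return malloc(nelem * elsize ? nelem * elsize : 1);
    }

    int main(void)
    {
        /* A huge element count is rejected instead of wrapping to a tiny buffer. */
        void *p = checked_alloc(SIZE_MAX / 4, 16);
        printf("huge request:  %s\n", p ? "allocated (unexpected)" : "rejected (expected)");
        free(p);

        /* A normal request still succeeds. */
        p = checked_alloc(1024, 16);
        printf("small request: %s\n", p ? "allocated" : "failed");
        free(p);
        return 0;
    }

The actual fixes apply the same idea inside PyMem_MALLOC/PyMem_RESIZE, the
string, tuple and unicode allocators, obmalloc, and PyOS_vsnprintf, returning
NULL (and raising MemoryError or OverflowError at the Python level) instead of
writing past an undersized buffer.
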
Diffstat (limited to 'lang')
-rw-r--r--  lang/python25/Makefile | 2
-rw-r--r--  lang/python25/files/patch-lib-test_test_bigmem.py | 163
-rw-r--r--  lang/python25/files/patch-lib-test_test_hashlib.py | 41
-rw-r--r--  lang/python25/files/patch-lib-test_test_strop.py | 28
-rw-r--r--  lang/python25/files/patch-lib-test_test_support.py | 62
-rw-r--r--  lang/python25/files/patch-lib_seq_tests.py | 21
-rw-r--r--  lang/python25/files/patch-modules_almodule.c | 14
-rw-r--r--  lang/python25/files/patch-modules_arraymodule.c | 33
-rw-r--r--  lang/python25/files/patch-modules_gcmodule.c | 58
-rw-r--r--  lang/python25/files/patch-modules_hashopenssl.c | 104
-rw-r--r--  lang/python25/files/patch-modules_mmapmodule.c | 11
-rw-r--r--  lang/python25/files/patch-modules_selectmodule.c | 16
-rw-r--r--  lang/python25/files/patch-modules_stropmodule.c | 31
-rw-r--r--  lang/python25/files/patch-objects_bufferobject.c | 13
-rw-r--r--  lang/python25/files/patch-objects_longobject.c | 11
-rw-r--r--  lang/python25/files/patch-objects_obmalloc.c | 34
-rw-r--r--  lang/python25/files/patch-objects_stringobject.c | 49
-rw-r--r--  lang/python25/files/patch-objects_tupleobject.c | 17
-rw-r--r--  lang/python25/files/patch-objects_unicodeobject.c | 115
-rw-r--r--  lang/python25/files/patch-python_mysnprintf.c | 55
-rw-r--r--  lang/python26/Makefile | 2
-rw-r--r--  lang/python26/files/patch-lib-test_test_bigmem.py | 163
-rw-r--r--  lang/python26/files/patch-lib-test_test_hashlib.py | 41
-rw-r--r--  lang/python26/files/patch-lib-test_test_strop.py | 28
-rw-r--r--  lang/python26/files/patch-lib-test_test_support.py | 62
-rw-r--r--  lang/python26/files/patch-lib_seq_tests.py | 21
-rw-r--r--  lang/python26/files/patch-modules_almodule.c | 14
-rw-r--r--  lang/python26/files/patch-modules_arraymodule.c | 33
-rw-r--r--  lang/python26/files/patch-modules_gcmodule.c | 58
-rw-r--r--  lang/python26/files/patch-modules_hashopenssl.c | 104
-rw-r--r--  lang/python26/files/patch-modules_mmapmodule.c | 11
-rw-r--r--  lang/python26/files/patch-modules_selectmodule.c | 16
-rw-r--r--  lang/python26/files/patch-modules_stropmodule.c | 31
-rw-r--r--  lang/python26/files/patch-objects_bufferobject.c | 13
-rw-r--r--  lang/python26/files/patch-objects_longobject.c | 11
-rw-r--r--  lang/python26/files/patch-objects_obmalloc.c | 34
-rw-r--r--  lang/python26/files/patch-objects_stringobject.c | 49
-rw-r--r--  lang/python26/files/patch-objects_tupleobject.c | 17
-rw-r--r--  lang/python26/files/patch-objects_unicodeobject.c | 115
-rw-r--r--  lang/python26/files/patch-python_mysnprintf.c | 55
-rw-r--r--  lang/python27/Makefile | 2
-rw-r--r--  lang/python27/files/patch-lib-test_test_bigmem.py | 163
-rw-r--r--  lang/python27/files/patch-lib-test_test_hashlib.py | 41
-rw-r--r--  lang/python27/files/patch-lib-test_test_strop.py | 28
-rw-r--r--  lang/python27/files/patch-lib-test_test_support.py | 62
-rw-r--r--  lang/python27/files/patch-lib_seq_tests.py | 21
-rw-r--r--  lang/python27/files/patch-modules_almodule.c | 14
-rw-r--r--  lang/python27/files/patch-modules_arraymodule.c | 33
-rw-r--r--  lang/python27/files/patch-modules_gcmodule.c | 58
-rw-r--r--  lang/python27/files/patch-modules_hashopenssl.c | 104
-rw-r--r--  lang/python27/files/patch-modules_mmapmodule.c | 11
-rw-r--r--  lang/python27/files/patch-modules_selectmodule.c | 16
-rw-r--r--  lang/python27/files/patch-modules_stropmodule.c | 31
-rw-r--r--  lang/python27/files/patch-objects_bufferobject.c | 13
-rw-r--r--  lang/python27/files/patch-objects_longobject.c | 11
-rw-r--r--  lang/python27/files/patch-objects_obmalloc.c | 34
-rw-r--r--  lang/python27/files/patch-objects_stringobject.c | 49
-rw-r--r--  lang/python27/files/patch-objects_tupleobject.c | 17
-rw-r--r--  lang/python27/files/patch-objects_unicodeobject.c | 115
-rw-r--r--  lang/python27/files/patch-python_mysnprintf.c | 55
-rw-r--r--  lang/python30/Makefile | 2
-rw-r--r--  lang/python30/files/patch-lib-test_test_bigmem.py | 163
-rw-r--r--  lang/python30/files/patch-lib-test_test_hashlib.py | 41
-rw-r--r--  lang/python30/files/patch-lib-test_test_strop.py | 28
-rw-r--r--  lang/python30/files/patch-lib-test_test_support.py | 62
-rw-r--r--  lang/python30/files/patch-lib_seq_tests.py | 21
-rw-r--r--  lang/python30/files/patch-modules_almodule.c | 14
-rw-r--r--  lang/python30/files/patch-modules_arraymodule.c | 33
-rw-r--r--  lang/python30/files/patch-modules_gcmodule.c | 58
-rw-r--r--  lang/python30/files/patch-modules_hashopenssl.c | 104
-rw-r--r--  lang/python30/files/patch-modules_mmapmodule.c | 11
-rw-r--r--  lang/python30/files/patch-modules_selectmodule.c | 16
-rw-r--r--  lang/python30/files/patch-modules_stropmodule.c | 31
-rw-r--r--  lang/python30/files/patch-objects_bufferobject.c | 13
-rw-r--r--  lang/python30/files/patch-objects_longobject.c | 11
-rw-r--r--  lang/python30/files/patch-objects_obmalloc.c | 34
-rw-r--r--  lang/python30/files/patch-objects_stringobject.c | 49
-rw-r--r--  lang/python30/files/patch-objects_tupleobject.c | 17
-rw-r--r--  lang/python30/files/patch-objects_unicodeobject.c | 115
-rw-r--r--  lang/python30/files/patch-python_mysnprintf.c | 55
-rw-r--r--  lang/python31/Makefile | 2
-rw-r--r--  lang/python31/files/patch-lib-test_test_bigmem.py | 163
-rw-r--r--  lang/python31/files/patch-lib-test_test_hashlib.py | 41
-rw-r--r--  lang/python31/files/patch-lib-test_test_strop.py | 28
-rw-r--r--  lang/python31/files/patch-lib-test_test_support.py | 62
-rw-r--r--  lang/python31/files/patch-lib_seq_tests.py | 21
-rw-r--r--  lang/python31/files/patch-modules_almodule.c | 14
-rw-r--r--  lang/python31/files/patch-modules_arraymodule.c | 33
-rw-r--r--  lang/python31/files/patch-modules_gcmodule.c | 58
-rw-r--r--  lang/python31/files/patch-modules_hashopenssl.c | 104
-rw-r--r--  lang/python31/files/patch-modules_mmapmodule.c | 11
-rw-r--r--  lang/python31/files/patch-modules_selectmodule.c | 16
-rw-r--r--  lang/python31/files/patch-modules_stropmodule.c | 31
-rw-r--r--  lang/python31/files/patch-objects_bufferobject.c | 13
-rw-r--r--  lang/python31/files/patch-objects_longobject.c | 11
-rw-r--r--  lang/python31/files/patch-objects_obmalloc.c | 34
-rw-r--r--  lang/python31/files/patch-objects_stringobject.c | 49
-rw-r--r--  lang/python31/files/patch-objects_tupleobject.c | 17
-rw-r--r--  lang/python31/files/patch-objects_unicodeobject.c | 115
-rw-r--r--  lang/python31/files/patch-python_mysnprintf.c | 55
-rw-r--r--  lang/python32/Makefile | 2
-rw-r--r--  lang/python32/files/patch-lib-test_test_bigmem.py | 163
-rw-r--r--  lang/python32/files/patch-lib-test_test_hashlib.py | 41
-rw-r--r--  lang/python32/files/patch-lib-test_test_strop.py | 28
-rw-r--r--  lang/python32/files/patch-lib-test_test_support.py | 62
-rw-r--r--  lang/python32/files/patch-lib_seq_tests.py | 21
-rw-r--r--  lang/python32/files/patch-modules_almodule.c | 14
-rw-r--r--  lang/python32/files/patch-modules_arraymodule.c | 33
-rw-r--r--  lang/python32/files/patch-modules_gcmodule.c | 58
-rw-r--r--  lang/python32/files/patch-modules_hashopenssl.c | 104
-rw-r--r--  lang/python32/files/patch-modules_mmapmodule.c | 11
-rw-r--r--  lang/python32/files/patch-modules_selectmodule.c | 16
-rw-r--r--  lang/python32/files/patch-modules_stropmodule.c | 31
-rw-r--r--  lang/python32/files/patch-objects_bufferobject.c | 13
-rw-r--r--  lang/python32/files/patch-objects_longobject.c | 11
-rw-r--r--  lang/python32/files/patch-objects_obmalloc.c | 34
-rw-r--r--  lang/python32/files/patch-objects_stringobject.c | 49
-rw-r--r--  lang/python32/files/patch-objects_tupleobject.c | 17
-rw-r--r--  lang/python32/files/patch-objects_unicodeobject.c | 115
-rw-r--r--  lang/python32/files/patch-python_mysnprintf.c | 55
120 files changed, 5262 insertions(+), 6 deletions(-)
diff --git a/lang/python25/Makefile b/lang/python25/Makefile
index 6224b163004d..fa6cd7bd7061 100644
--- a/lang/python25/Makefile
+++ b/lang/python25/Makefile
@@ -6,7 +6,7 @@
PORTNAME= python25
PORTVERSION= 2.5.2
-PORTREVISION= 2
+PORTREVISION= 3
CATEGORIES= lang python ipv6
MASTER_SITES= ${PYTHON_MASTER_SITES}
MASTER_SITE_SUBDIR= ${PYTHON_MASTER_SITE_SUBDIR}
diff --git a/lang/python25/files/patch-lib-test_test_bigmem.py b/lang/python25/files/patch-lib-test_test_bigmem.py
new file mode 100644
index 000000000000..5ec5935a046f
--- /dev/null
+++ b/lang/python25/files/patch-lib-test_test_bigmem.py
@@ -0,0 +1,163 @@
+--- Lib/test/test_bigmem.py.orig 2007-11-30 21:53:17.000000000 +0000
++++ Lib/test/test_bigmem.py 2008-08-30 10:16:13.000000000 +0100
+@@ -1,5 +1,5 @@
+ from test import test_support
+-from test.test_support import bigmemtest, _1G, _2G
++from test.test_support import bigmemtest, _1G, _2G, _4G, precisionbigmemtest
+
+ import unittest
+ import operator
+@@ -54,6 +54,22 @@
+ self.assertEquals(s[lpadsize:-rpadsize], SUBSTR)
+ self.assertEquals(s.strip(), SUBSTR.strip())
+
++ @precisionbigmemtest(size=_2G - 1, memuse=1)
++ def test_center_unicode(self, size):
++ SUBSTR = u' abc def ghi'
++ try:
++ s = SUBSTR.center(size)
++ except OverflowError:
++ pass # acceptable on 32-bit
++ else:
++ self.assertEquals(len(s), size)
++ lpadsize = rpadsize = (len(s) - len(SUBSTR)) // 2
++ if len(s) % 2:
++ lpadsize += 1
++ self.assertEquals(s[lpadsize:-rpadsize], SUBSTR)
++ self.assertEquals(s.strip(), SUBSTR.strip())
++ del s
++
+ @bigmemtest(minsize=_2G, memuse=2)
+ def test_count(self, size):
+ SUBSTR = ' abc def ghi'
+@@ -70,10 +86,44 @@
+ s = '.' * size
+ self.assertEquals(len(s.decode('utf-8')), size)
+
++ def basic_encode_test(self, size, enc, c=u'.', expectedsize=None):
++ if expectedsize is None:
++ expectedsize = size
++
++ s = c * size
++ self.assertEquals(len(s.encode(enc)), expectedsize)
++
+ @bigmemtest(minsize=_2G + 2, memuse=3)
+ def test_encode(self, size):
+- s = u'.' * size
+- self.assertEquals(len(s.encode('utf-8')), size)
++ return self.basic_encode_test(size, 'utf-8')
++
++ @precisionbigmemtest(size=_4G / 6 + 2, memuse=2)
++ def test_encode_raw_unicode_escape(self, size):
++ try:
++ return self.basic_encode_test(size, 'raw_unicode_escape')
++ except MemoryError:
++ pass # acceptable on 32-bit
++
++ @precisionbigmemtest(size=_4G / 5 + 70, memuse=3)
++ def test_encode_utf7(self, size):
++ try:
++ return self.basic_encode_test(size, 'utf7')
++ except MemoryError:
++ pass # acceptable on 32-bit
++
++ @precisionbigmemtest(size=_2G-1, memuse=2)
++ def test_decodeascii(self, size):
++ return self.basic_encode_test(size, 'ascii', c='A')
++
++ @precisionbigmemtest(size=_4G / 5, memuse=6+2)
++ def test_unicode_repr_oflw(self, size):
++ try:
++ s = u"\uAAAA"*size
++ r = repr(s)
++ except MemoryError:
++ pass # acceptable on 32-bit
++ else:
++ self.failUnless(s == eval(r))
+
+ @bigmemtest(minsize=_2G, memuse=2)
+ def test_endswith(self, size):
+@@ -459,6 +509,11 @@
+ self.assertEquals(s.count('\\'), size)
+ self.assertEquals(s.count('0'), size * 2)
+
++ @bigmemtest(minsize=2**32 / 5, memuse=6+2)
++ def test_unicode_repr(self, size):
++ s = u"\uAAAA" * size
++ self.failUnless(len(repr(s)) > size)
++
+ # This test is meaningful even with size < 2G, as long as the
+ # doubled string is > 2G (but it tests more if both are > 2G :)
+ @bigmemtest(minsize=_1G + 2, memuse=3)
+@@ -642,6 +697,35 @@
+ def test_repeat_large(self, size):
+ return self.basic_test_repeat(size)
+
++ @bigmemtest(minsize=_1G - 1, memuse=12)
++ def test_repeat_large_2(self, size):
++ return self.basic_test_repeat(size)
++
++ @precisionbigmemtest(size=_1G - 1, memuse=9)
++ def test_from_2G_generator(self, size):
++ try:
++ t = tuple(xrange(size))
++ except MemoryError:
++ pass # acceptable on 32-bit
++ else:
++ count = 0
++ for item in t:
++ self.assertEquals(item, count)
++ count += 1
++ self.assertEquals(count, size)
++
++ @precisionbigmemtest(size=_1G - 25, memuse=9)
++ def test_from_almost_2G_generator(self, size):
++ try:
++ t = tuple(xrange(size))
++ count = 0
++ for item in t:
++ self.assertEquals(item, count)
++ count += 1
++ self.assertEquals(count, size)
++ except MemoryError:
++ pass # acceptable, expected on 32-bit
++
+ # Like test_concat, split in two.
+ def basic_test_repr(self, size):
+ t = (0,) * size
+@@ -957,8 +1041,34 @@
+ self.assertEquals(l[:10], [1] * 10)
+ self.assertEquals(l[-10:], [5] * 10)
+
++class BufferTest(unittest.TestCase):
++
++ @precisionbigmemtest(size=_1G, memuse=4)
++ def test_repeat(self, size):
++ try:
++ b = buffer("AAAA")*size
++ except MemoryError:
++ pass # acceptable on 32-bit
++ else:
++ count = 0
++ for c in b:
++ self.assertEquals(c, 'A')
++ count += 1
++ self.assertEquals(count, size*4)
++
+ def test_main():
+- test_support.run_unittest(StrTest, TupleTest, ListTest)
++ test_support.run_unittest(StrTest, TupleTest, ListTest, BufferTest)
++
++# Expected failures (crashers)
++# del StrTest.test_center_unicode
++del StrTest.test_decodeascii
++# del StrTest.test_encode_utf32
++# del StrTest.test_encode_utf7
++# del StrTest.test_encode_raw_unicode_escape
++#
++# del TupleTest.test_from_2G_generator
++#
++# del BufferTest.test_repeat
+
+ if __name__ == '__main__':
+ if len(sys.argv) > 1:
diff --git a/lang/python25/files/patch-lib-test_test_hashlib.py b/lang/python25/files/patch-lib-test_test_hashlib.py
new file mode 100644
index 000000000000..15ede2b17e8c
--- /dev/null
+++ b/lang/python25/files/patch-lib-test_test_hashlib.py
@@ -0,0 +1,41 @@
+--- Lib/test/test_hashlib.py.orig 2005-08-21 19:45:59.000000000 +0100
++++ Lib/test/test_hashlib.py 2008-08-30 10:43:27.000000000 +0100
+@@ -9,7 +9,7 @@
+ import hashlib
+ import unittest
+ from test import test_support
+-
++from test.test_support import _4G, precisionbigmemtest
+
+ def hexstr(s):
+ import string
+@@ -55,7 +55,6 @@
+ m2.update(aas + bees + cees)
+ self.assertEqual(m1.digest(), m2.digest())
+
+-
+ def check(self, name, data, digest):
+ # test the direct constructors
+ computed = getattr(hashlib, name)(data).hexdigest()
+@@ -75,6 +74,21 @@
+ self.check('md5', 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789',
+ 'd174ab98d277d9f5a5611c2c9f419d9f')
+
++ @precisionbigmemtest(size=_4G + 5, memuse=1)
++ def test_case_md5_huge(self, size):
++ if size == _4G + 5:
++ try:
++ self.check('md5', 'A'*size, 'c9af2dff37468ce5dfee8f2cfc0a9c6d')
++ except OverflowError:
++ pass # 32-bit arch
++
++ @precisionbigmemtest(size=_4G - 1, memuse=1)
++ def test_case_md5_uintmax(self, size):
++ if size == _4G - 1:
++ try:
++ self.check('md5', 'A'*size, '28138d306ff1b8281f1a9067e1a1a2b3')
++ except OverflowError:
++ pass # 32-bit arch
+
+ # use the three examples from Federal Information Processing Standards
+ # Publication 180-1, Secure Hash Standard, 1995 April 17
diff --git a/lang/python25/files/patch-lib-test_test_strop.py b/lang/python25/files/patch-lib-test_test_strop.py
new file mode 100644
index 000000000000..f0e40166957a
--- /dev/null
+++ b/lang/python25/files/patch-lib-test_test_strop.py
@@ -0,0 +1,28 @@
+--- Lib/test/test_strop.py.orig 2002-07-31 00:27:12.000000000 +0100
++++ Lib/test/test_strop.py 2008-08-30 10:16:13.000000000 +0100
+@@ -115,6 +115,25 @@
+ strop.uppercase
+ strop.whitespace
+
++ @test_support.precisionbigmemtest(size=test_support._2G - 1, memuse=5)
++ def test_stropjoin_huge_list(self, size):
++ a = "A" * size
++ try:
++ r = strop.join([a, a], a)
++ except OverflowError:
++ pass
++ else:
++ self.assertEquals(len(r), len(a) * 3)
++
++ @test_support.precisionbigmemtest(size=test_support._2G - 1, memuse=1)
++ def test_stropjoin_huge_tup(self, size):
++ a = "A" * size
++ try:
++ r = strop.join((a, a), a)
++ except OverflowError:
++ pass # acceptable on 32-bit
++ else:
++ self.assertEquals(len(r), len(a) * 3)
+
+ transtable = '\000\001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037 !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`xyzdefghijklmnopqrstuvwxyz{|}~\177\200\201\202\203\204\205\206\207\210\211\212\213\214\215\216\217\220\221\222\223\224\225\226\227\230\231\232\233\234\235\236\237\240\241\242\243\244\245\246\247\250\251\252\253\254\255\256\257\260\261\262\263\264\265\266\267\270\271\272\273\274\275\276\277\300\301\302\303\304\305\306\307\310\311\312\313\314\315\316\317\320\321\322\323\324\325\326\327\330\331\332\333\334\335\336\337\340\341\342\343\344\345\346\347\350\351\352\353\354\355\356\357\360\361\362\363\364\365\366\367\370\371\372\373\374\375\376\377'
+
diff --git a/lang/python25/files/patch-lib-test_test_support.py b/lang/python25/files/patch-lib-test_test_support.py
new file mode 100644
index 000000000000..b11933bc7eed
--- /dev/null
+++ b/lang/python25/files/patch-lib-test_test_support.py
@@ -0,0 +1,62 @@
+--- Lib/test/test_support.py.orig 2008-01-27 01:24:44.000000000 +0000
++++ Lib/test/test_support.py 2008-08-30 10:16:13.000000000 +0100
+@@ -33,6 +33,7 @@
+ use_resources = None # Flag set to [] by regrtest.py
+ max_memuse = 0 # Disable bigmem tests (they will still be run with
+ # small sizes, to make sure they work.)
++real_max_memuse = 0
+
+ # _original_stdout is meant to hold stdout at the time regrtest began.
+ # This may be "the real" stdout, or IDLE's emulation of stdout, or whatever.
+@@ -323,6 +324,7 @@
+ _1M = 1024*1024
+ _1G = 1024 * _1M
+ _2G = 2 * _1G
++_4G = 4 * _1G
+
+ # Hack to get at the maximum value an internal index can take.
+ class _Dummy:
+@@ -333,6 +335,7 @@
+ def set_memlimit(limit):
+ import re
+ global max_memuse
++ global real_max_memuse
+ sizes = {
+ 'k': 1024,
+ 'm': _1M,
+@@ -344,6 +347,7 @@
+ if m is None:
+ raise ValueError('Invalid memory limit %r' % (limit,))
+ memlimit = int(float(m.group(1)) * sizes[m.group(3).lower()])
++ real_max_memuse = memlimit
+ if memlimit > MAX_Py_ssize_t:
+ memlimit = MAX_Py_ssize_t
+ if memlimit < _2G - 1:
+@@ -389,6 +393,27 @@
+ return wrapper
+ return decorator
+
++def precisionbigmemtest(size, memuse, overhead=5*_1M):
++ def decorator(f):
++ def wrapper(self):
++ if not real_max_memuse:
++ maxsize = 5147
++ else:
++ maxsize = size
++
++ if real_max_memuse and real_max_memuse < maxsize * memuse:
++ if verbose:
++ sys.stderr.write("Skipping %s because of memory "
++ "constraint\n" % (f.__name__,))
++ return
++
++ return f(self, maxsize)
++ wrapper.size = size
++ wrapper.memuse = memuse
++ wrapper.overhead = overhead
++ return wrapper
++ return decorator
++
+ def bigaddrspacetest(f):
+ """Decorator for tests that fill the address space."""
+ def wrapper(self):
diff --git a/lang/python25/files/patch-lib_seq_tests.py b/lang/python25/files/patch-lib_seq_tests.py
new file mode 100644
index 000000000000..9be35ae82517
--- /dev/null
+++ b/lang/python25/files/patch-lib_seq_tests.py
@@ -0,0 +1,21 @@
+--- Lib/test/seq_tests.py.orig 2007-11-12 20:04:41.000000000 +0000
++++ Lib/test/seq_tests.py 2008-08-30 10:16:13.000000000 +0100
+@@ -307,11 +307,13 @@
+ self.assertEqual(id(s), id(s*1))
+
+ def test_bigrepeat(self):
+- x = self.type2test([0])
+- x *= 2**16
+- self.assertRaises(MemoryError, x.__mul__, 2**16)
+- if hasattr(x, '__imul__'):
+- self.assertRaises(MemoryError, x.__imul__, 2**16)
++ import sys
++ if sys.maxint <= 2147483647:
++ x = self.type2test([0])
++ x *= 2**16
++ self.assertRaises(MemoryError, x.__mul__, 2**16)
++ if hasattr(x, '__imul__'):
++ self.assertRaises(MemoryError, x.__imul__, 2**16)
+
+ def test_subscript(self):
+ a = self.type2test([10, 11])
diff --git a/lang/python25/files/patch-modules_almodule.c b/lang/python25/files/patch-modules_almodule.c
new file mode 100644
index 000000000000..8e3b9b11380d
--- /dev/null
+++ b/lang/python25/files/patch-modules_almodule.c
@@ -0,0 +1,14 @@
+--- Modules/almodule.c.orig 2006-09-25 07:53:42.000000000 +0100
++++ Modules/almodule.c 2008-08-30 10:39:43.000000000 +0100
+@@ -1633,9 +1633,11 @@
+ if (nvals < 0)
+ goto cleanup;
+ if (nvals > setsize) {
++ ALvalue *old_return_set = return_set;
+ setsize = nvals;
+ PyMem_RESIZE(return_set, ALvalue, setsize);
+ if (return_set == NULL) {
++ return_set = old_return_set;
+ PyErr_NoMemory();
+ goto cleanup;
+ }
diff --git a/lang/python25/files/patch-modules_arraymodule.c b/lang/python25/files/patch-modules_arraymodule.c
new file mode 100644
index 000000000000..738ee48f1485
--- /dev/null
+++ b/lang/python25/files/patch-modules_arraymodule.c
@@ -0,0 +1,33 @@
+--- Modules/arraymodule.c.orig 2008-02-15 19:11:46.000000000 +0000
++++ Modules/arraymodule.c 2008-08-30 10:39:43.000000000 +0100
+@@ -816,6 +816,7 @@
+ array_do_extend(arrayobject *self, PyObject *bb)
+ {
+ Py_ssize_t size;
++ char *old_item;
+
+ if (!array_Check(bb))
+ return array_iter_extend(self, bb);
+@@ -831,10 +832,11 @@
+ return -1;
+ }
+ size = self->ob_size + b->ob_size;
++ old_item = self->ob_item;
+ PyMem_RESIZE(self->ob_item, char, size*self->ob_descr->itemsize);
+ if (self->ob_item == NULL) {
+- PyObject_Del(self);
+- PyErr_NoMemory();
++ self->ob_item = old_item;
++ PyErr_NoMemory();
+ return -1;
+ }
+ memcpy(self->ob_item + self->ob_size*self->ob_descr->itemsize,
+@@ -886,7 +888,7 @@
+ if (size > PY_SSIZE_T_MAX / n) {
+ return PyErr_NoMemory();
+ }
+- PyMem_Resize(items, char, n * size);
++ PyMem_RESIZE(items, char, n * size);
+ if (items == NULL)
+ return PyErr_NoMemory();
+ p = items;
diff --git a/lang/python25/files/patch-modules_gcmodule.c b/lang/python25/files/patch-modules_gcmodule.c
new file mode 100644
index 000000000000..59253930e9c3
--- /dev/null
+++ b/lang/python25/files/patch-modules_gcmodule.c
@@ -0,0 +1,58 @@
+--- Include/pymem.h.orig 2008-02-14 11:26:18.000000000 +0000
++++ Include/pymem.h 2008-08-30 10:39:43.000000000 +0100
+@@ -67,8 +67,12 @@
+ for malloc(0), which would be treated as an error. Some platforms
+ would return a pointer with no memory behind it, which would break
+ pymalloc. To solve these problems, allocate an extra byte. */
+-#define PyMem_MALLOC(n) malloc((n) ? (n) : 1)
+-#define PyMem_REALLOC(p, n) realloc((p), (n) ? (n) : 1)
++/* Returns NULL to indicate error if a negative size or size larger than
++ Py_ssize_t can represent is supplied. Helps prevents security holes. */
++#define PyMem_MALLOC(n) (((n) < 0 || (n) > PY_SSIZE_T_MAX) ? NULL \
++ : malloc((n) ? (n) : 1))
++#define PyMem_REALLOC(p, n) (((n) < 0 || (n) > PY_SSIZE_T_MAX) ? NULL \
++ : realloc((p), (n) ? (n) : 1))
+ #define PyMem_FREE free
+
+ #endif /* PYMALLOC_DEBUG */
+@@ -77,24 +81,31 @@
+ * Type-oriented memory interface
+ * ==============================
+ *
+- * These are carried along for historical reasons. There's rarely a good
+- * reason to use them anymore (you can just as easily do the multiply and
+- * cast yourself).
++ * Allocate memory for n objects of the given type. Returns a new pointer
++ * or NULL if the request was too large or memory allocation failed. Use
++ * these macros rather than doing the multiplication yourself so that proper
++ * overflow checking is always done.
+ */
+
+ #define PyMem_New(type, n) \
+- ( assert((n) <= PY_SIZE_MAX / sizeof(type)) , \
++ ( ((n) > PY_SSIZE_T_MAX / sizeof(type)) ? NULL : \
+ ( (type *) PyMem_Malloc((n) * sizeof(type)) ) )
+ #define PyMem_NEW(type, n) \
+- ( assert((n) <= PY_SIZE_MAX / sizeof(type)) , \
++ ( ((n) > PY_SSIZE_T_MAX / sizeof(type)) ? NULL : \
+ ( (type *) PyMem_MALLOC((n) * sizeof(type)) ) )
+
++/*
++ * The value of (p) is always clobbered by this macro regardless of success.
++ * The caller MUST check if (p) is NULL afterwards and deal with the memory
++ * error if so. This means the original value of (p) MUST be saved for the
++ * caller's memory error handler to not lose track of it.
++ */
+ #define PyMem_Resize(p, type, n) \
+- ( assert((n) <= PY_SIZE_MAX / sizeof(type)) , \
+- ( (p) = (type *) PyMem_Realloc((p), (n) * sizeof(type)) ) )
++ ( (p) = ((n) > PY_SSIZE_T_MAX / sizeof(type)) ? NULL : \
++ (type *) PyMem_Realloc((p), (n) * sizeof(type)) )
+ #define PyMem_RESIZE(p, type, n) \
+- ( assert((n) <= PY_SIZE_MAX / sizeof(type)) , \
+- ( (p) = (type *) PyMem_REALLOC((p), (n) * sizeof(type)) ) )
++ ( (p) = ((n) > PY_SSIZE_T_MAX / sizeof(type)) ? NULL : \
++ (type *) PyMem_REALLOC((p), (n) * sizeof(type)) )
+
+ /* PyMem{Del,DEL} are left over from ancient days, and shouldn't be used
+ * anymore. They're just confusing aliases for PyMem_{Free,FREE} now.
diff --git a/lang/python25/files/patch-modules_hashopenssl.c b/lang/python25/files/patch-modules_hashopenssl.c
new file mode 100644
index 000000000000..1f0cd56a244a
--- /dev/null
+++ b/lang/python25/files/patch-modules_hashopenssl.c
@@ -0,0 +1,104 @@
+--- Modules/_hashopenssl.c.orig 2006-05-29 22:04:52.000000000 +0100
++++ Modules/_hashopenssl.c 2008-08-30 10:43:27.000000000 +0100
+@@ -19,6 +19,8 @@
+ /* EVP is the preferred interface to hashing in OpenSSL */
+ #include <openssl/evp.h>
+
++#define MUNCH_SIZE INT_MAX
++
+
+ #ifndef HASH_OBJ_CONSTRUCTOR
+ #define HASH_OBJ_CONSTRUCTOR 0
+@@ -164,9 +166,18 @@
+ if (!PyArg_ParseTuple(args, "s#:update", &cp, &len))
+ return NULL;
+
++ if (len > 0 && len <= MUNCH_SIZE) {
+ EVP_DigestUpdate(&self->ctx, cp, Py_SAFE_DOWNCAST(len, Py_ssize_t,
+ unsigned int));
+-
++ } else {
++ Py_ssize_t offset = 0;
++ while (len) {
++ unsigned int process = len > MUNCH_SIZE ? MUNCH_SIZE : len;
++ EVP_DigestUpdate(&self->ctx, cp + offset, process);
++ len -= process;
++ offset += process;
++ }
++ }
+ Py_INCREF(Py_None);
+ return Py_None;
+ }
+@@ -255,9 +266,20 @@
+ self->name = name_obj;
+ Py_INCREF(self->name);
+
+- if (cp && len)
++ if (cp && len) {
++ if (len > 0 && len <= MUNCH_SIZE) {
+ EVP_DigestUpdate(&self->ctx, cp, Py_SAFE_DOWNCAST(len, Py_ssize_t,
+ unsigned int));
++ } else {
++ Py_ssize_t offset = 0;
++ while (len) {
++ unsigned int process = len > MUNCH_SIZE ? MUNCH_SIZE : len;
++ EVP_DigestUpdate(&self->ctx, cp + offset, process);
++ len -= process;
++ offset += process;
++ }
++ }
++ }
+
+ return 0;
+ }
+@@ -328,7 +350,7 @@
+ static PyObject *
+ EVPnew(PyObject *name_obj,
+ const EVP_MD *digest, const EVP_MD_CTX *initial_ctx,
+- const unsigned char *cp, unsigned int len)
++ const unsigned char *cp, Py_ssize_t len)
+ {
+ EVPobject *self;
+
+@@ -346,8 +368,20 @@
+ EVP_DigestInit(&self->ctx, digest);
+ }
+
+- if (cp && len)
+- EVP_DigestUpdate(&self->ctx, cp, len);
++ if (cp && len) {
++ if (len > 0 && len <= MUNCH_SIZE) {
++ EVP_DigestUpdate(&self->ctx, cp, Py_SAFE_DOWNCAST(len, Py_ssize_t,
++ unsigned int));
++ } else {
++ Py_ssize_t offset = 0;
++ while (len) {
++ unsigned int process = len > MUNCH_SIZE ? MUNCH_SIZE : len;
++ EVP_DigestUpdate(&self->ctx, cp + offset, process);
++ len -= process;
++ offset += process;
++ }
++ }
++ }
+
+ return (PyObject *)self;
+ }
+@@ -384,8 +418,7 @@
+
+ digest = EVP_get_digestbyname(name);
+
+- return EVPnew(name_obj, digest, NULL, cp, Py_SAFE_DOWNCAST(len, Py_ssize_t,
+- unsigned int));
++ return EVPnew(name_obj, digest, NULL, cp, len);
+ }
+
+ /*
+@@ -410,7 +443,7 @@
+ CONST_ ## NAME ## _name_obj, \
+ NULL, \
+ CONST_new_ ## NAME ## _ctx_p, \
+- cp, Py_SAFE_DOWNCAST(len, Py_ssize_t, unsigned int)); \
++ cp, len); \
+ }
+
+ /* a PyMethodDef structure for the constructor */
diff --git a/lang/python25/files/patch-modules_mmapmodule.c b/lang/python25/files/patch-modules_mmapmodule.c
new file mode 100644
index 000000000000..60f3d71ff349
--- /dev/null
+++ b/lang/python25/files/patch-modules_mmapmodule.c
@@ -0,0 +1,11 @@
+--- Modules/mmapmodule.c.orig 2006-08-22 14:57:07.000000000 +0100
++++ Modules/mmapmodule.c 2008-08-30 10:16:13.000000000 +0100
+@@ -223,7 +223,7 @@
+ return(NULL);
+
+ /* silently 'adjust' out-of-range requests */
+- if ((self->pos + num_bytes) > self->size) {
++ if (num_bytes > self->size - self->pos) {
+ num_bytes -= (self->pos+num_bytes) - self->size;
+ }
+ result = Py_BuildValue("s#", self->data+self->pos, num_bytes);
diff --git a/lang/python25/files/patch-modules_selectmodule.c b/lang/python25/files/patch-modules_selectmodule.c
new file mode 100644
index 000000000000..446241f05a97
--- /dev/null
+++ b/lang/python25/files/patch-modules_selectmodule.c
@@ -0,0 +1,16 @@
+--- Modules/selectmodule.c.orig 2006-07-10 02:18:57.000000000 +0100
++++ Modules/selectmodule.c 2008-08-30 10:39:43.000000000 +0100
+@@ -349,10 +349,12 @@
+ {
+ Py_ssize_t i, pos;
+ PyObject *key, *value;
++ struct pollfd *old_ufds = self->ufds;
+
+ self->ufd_len = PyDict_Size(self->dict);
+- PyMem_Resize(self->ufds, struct pollfd, self->ufd_len);
++ PyMem_RESIZE(self->ufds, struct pollfd, self->ufd_len);
+ if (self->ufds == NULL) {
++ self->ufds = old_ufds;
+ PyErr_NoMemory();
+ return 0;
+ }
diff --git a/lang/python25/files/patch-modules_stropmodule.c b/lang/python25/files/patch-modules_stropmodule.c
new file mode 100644
index 000000000000..d7f42ce18871
--- /dev/null
+++ b/lang/python25/files/patch-modules_stropmodule.c
@@ -0,0 +1,31 @@
+--- Modules/stropmodule.c.orig 2008-02-14 11:26:18.000000000 +0000
++++ Modules/stropmodule.c 2008-08-30 10:16:13.000000000 +0100
+@@ -216,6 +216,13 @@
+ return NULL;
+ }
+ slen = PyString_GET_SIZE(item);
++ if (slen > PY_SSIZE_T_MAX - reslen ||
++ seplen > PY_SSIZE_T_MAX - reslen - seplen) {
++ PyErr_SetString(PyExc_OverflowError,
++ "input too long");
++ Py_DECREF(res);
++ return NULL;
++ }
+ while (reslen + slen + seplen >= sz) {
+ if (_PyString_Resize(&res, sz * 2) < 0)
+ return NULL;
+@@ -253,6 +260,14 @@
+ return NULL;
+ }
+ slen = PyString_GET_SIZE(item);
++ if (slen > PY_SSIZE_T_MAX - reslen ||
++ seplen > PY_SSIZE_T_MAX - reslen - seplen) {
++ PyErr_SetString(PyExc_OverflowError,
++ "input too long");
++ Py_DECREF(res);
++ Py_XDECREF(item);
++ return NULL;
++ }
+ while (reslen + slen + seplen >= sz) {
+ if (_PyString_Resize(&res, sz * 2) < 0) {
+ Py_DECREF(item);
diff --git a/lang/python25/files/patch-objects_bufferobject.c b/lang/python25/files/patch-objects_bufferobject.c
new file mode 100644
index 000000000000..16e99568ad9a
--- /dev/null
+++ b/lang/python25/files/patch-objects_bufferobject.c
@@ -0,0 +1,13 @@
+--- Objects/bufferobject.c.orig 2008-02-14 11:26:18.000000000 +0000
++++ Objects/bufferobject.c 2008-08-30 10:16:13.000000000 +0100
+@@ -427,6 +427,10 @@
+ count = 0;
+ if (!get_buf(self, &ptr, &size, ANY_BUFFER))
+ return NULL;
++ if (count > PY_SSIZE_T_MAX / size) {
++ PyErr_SetString(PyExc_MemoryError, "result too large");
++ return NULL;
++ }
+ ob = PyString_FromStringAndSize(NULL, size * count);
+ if ( ob == NULL )
+ return NULL;
diff --git a/lang/python25/files/patch-objects_longobject.c b/lang/python25/files/patch-objects_longobject.c
new file mode 100644
index 000000000000..1221db9fed0a
--- /dev/null
+++ b/lang/python25/files/patch-objects_longobject.c
@@ -0,0 +1,11 @@
+--- Objects/longobject.c.orig 2007-05-07 19:30:48.000000000 +0100
++++ Objects/longobject.c 2008-08-30 10:16:13.000000000 +0100
+@@ -70,6 +70,8 @@
+ PyErr_NoMemory();
+ return NULL;
+ }
++ /* XXX(nnorwitz): This can overflow --
++ PyObject_NEW_VAR / _PyObject_VAR_SIZE need to detect overflow */
+ return PyObject_NEW_VAR(PyLongObject, &PyLong_Type, size);
+ }
+
diff --git a/lang/python25/files/patch-objects_obmalloc.c b/lang/python25/files/patch-objects_obmalloc.c
new file mode 100644
index 000000000000..27050596fbfc
--- /dev/null
+++ b/lang/python25/files/patch-objects_obmalloc.c
@@ -0,0 +1,34 @@
+--- Objects/obmalloc.c.orig 2008-02-14 11:26:18.000000000 +0000
++++ Objects/obmalloc.c 2008-08-30 10:39:43.000000000 +0100
+@@ -727,6 +727,15 @@
+ uint size;
+
+ /*
++ * Limit ourselves to PY_SSIZE_T_MAX bytes to prevent security holes.
++ * Most python internals blindly use a signed Py_ssize_t to track
++ * things without checking for overflows or negatives.
++ * As size_t is unsigned, checking for nbytes < 0 is not required.
++ */
++ if (nbytes > PY_SSIZE_T_MAX)
++ return NULL;
++
++ /*
+ * This implicitly redirects malloc(0).
+ */
+ if ((nbytes - 1) < SMALL_REQUEST_THRESHOLD) {
+@@ -1130,6 +1139,15 @@
+ if (p == NULL)
+ return PyObject_Malloc(nbytes);
+
++ /*
++ * Limit ourselves to PY_SSIZE_T_MAX bytes to prevent security holes.
++ * Most python internals blindly use a signed Py_ssize_t to track
++ * things without checking for overflows or negatives.
++ * As size_t is unsigned, checking for nbytes < 0 is not required.
++ */
++ if (nbytes > PY_SSIZE_T_MAX)
++ return NULL;
++
+ pool = POOL_ADDR(p);
+ if (Py_ADDRESS_IN_RANGE(p, pool)) {
+ /* We're in charge of this block */
diff --git a/lang/python25/files/patch-objects_stringobject.c b/lang/python25/files/patch-objects_stringobject.c
new file mode 100644
index 000000000000..af55c78a4928
--- /dev/null
+++ b/lang/python25/files/patch-objects_stringobject.c
@@ -0,0 +1,49 @@
+--- Objects/stringobject.c.orig 2007-11-07 01:19:49.000000000 +0000
++++ Objects/stringobject.c 2008-08-30 10:16:13.000000000 +0100
+@@ -71,6 +71,11 @@
+ return (PyObject *)op;
+ }
+
++ if (size > PY_SSIZE_T_MAX - sizeof(PyStringObject)) {
++ PyErr_SetString(PyExc_OverflowError, "string is too large");
++ return NULL;
++ }
++
+ /* Inline PyObject_NewVar */
+ op = (PyStringObject *)PyObject_MALLOC(sizeof(PyStringObject) + size);
+ if (op == NULL)
+@@ -106,7 +111,7 @@
+
+ assert(str != NULL);
+ size = strlen(str);
+- if (size > PY_SSIZE_T_MAX) {
++ if (size > PY_SSIZE_T_MAX - sizeof(PyStringObject)) {
+ PyErr_SetString(PyExc_OverflowError,
+ "string is too long for a Python string");
+ return NULL;
+@@ -967,14 +972,24 @@
+ Py_INCREF(a);
+ return (PyObject *)a;
+ }
++ /* Check that string sizes are not negative, to prevent an
++ overflow in cases where we are passed incorrectly-created
++ strings with negative lengths (due to a bug in other code).
++ */
+ size = a->ob_size + b->ob_size;
+- if (size < 0) {
++ if (a->ob_size < 0 || b->ob_size < 0 ||
++ a->ob_size > PY_SSIZE_T_MAX - b->ob_size) {
+ PyErr_SetString(PyExc_OverflowError,
+ "strings are too large to concat");
+ return NULL;
+ }
+
+ /* Inline PyObject_NewVar */
++ if (size > PY_SSIZE_T_MAX - sizeof(PyStringObject)) {
++ PyErr_SetString(PyExc_OverflowError,
++ "strings are too large to concat");
++ return NULL;
++ }
+ op = (PyStringObject *)PyObject_MALLOC(sizeof(PyStringObject) + size);
+ if (op == NULL)
+ return PyErr_NoMemory();
diff --git a/lang/python25/files/patch-objects_tupleobject.c b/lang/python25/files/patch-objects_tupleobject.c
new file mode 100644
index 000000000000..eb133b6e002c
--- /dev/null
+++ b/lang/python25/files/patch-objects_tupleobject.c
@@ -0,0 +1,17 @@
+--- Objects/tupleobject.c.orig 2006-08-12 18:03:09.000000000 +0100
++++ Objects/tupleobject.c 2008-08-30 10:16:13.000000000 +0100
+@@ -60,11 +60,12 @@
+ Py_ssize_t nbytes = size * sizeof(PyObject *);
+ /* Check for overflow */
+ if (nbytes / sizeof(PyObject *) != (size_t)size ||
+- (nbytes += sizeof(PyTupleObject) - sizeof(PyObject *))
+- <= 0)
++ (nbytes > PY_SSIZE_T_MAX - sizeof(PyTupleObject) - sizeof(PyObject *)))
+ {
+ return PyErr_NoMemory();
+ }
++ nbytes += sizeof(PyTupleObject) - sizeof(PyObject *);
++
+ op = PyObject_GC_NewVar(PyTupleObject, &PyTuple_Type, size);
+ if (op == NULL)
+ return NULL;
diff --git a/lang/python25/files/patch-objects_unicodeobject.c b/lang/python25/files/patch-objects_unicodeobject.c
new file mode 100644
index 000000000000..85e88caae0e3
--- /dev/null
+++ b/lang/python25/files/patch-objects_unicodeobject.c
@@ -0,0 +1,115 @@
+--- Objects/unicodeobject.c.orig 2007-11-02 22:46:38.000000000 +0000
++++ Objects/unicodeobject.c 2008-08-30 10:16:13.000000000 +0100
+@@ -239,6 +239,11 @@
+ return unicode_empty;
+ }
+
++ /* Ensure we won't overflow the size. */
++ if (length > ((PY_SSIZE_T_MAX / sizeof(Py_UNICODE)) - 1)) {
++ return (PyUnicodeObject *)PyErr_NoMemory();
++ }
++
+ /* Unicode freelist & memory allocation */
+ if (unicode_freelist) {
+ unicode = unicode_freelist;
+@@ -1091,6 +1096,9 @@
+ char * out;
+ char * start;
+
++ if (cbAllocated / 5 != size)
++ return PyErr_NoMemory();
++
+ if (size == 0)
+ return PyString_FromStringAndSize(NULL, 0);
+
+@@ -1689,8 +1697,9 @@
+ {
+ PyObject *v;
+ unsigned char *p;
++ Py_ssize_t nsize, bytesize;
+ #ifdef Py_UNICODE_WIDE
+- int i, pairs;
++ Py_ssize_t i, pairs;
+ #else
+ const int pairs = 0;
+ #endif
+@@ -1713,8 +1722,15 @@
+ if (s[i] >= 0x10000)
+ pairs++;
+ #endif
+- v = PyString_FromStringAndSize(NULL,
+- 2 * (size + pairs + (byteorder == 0)));
++ /* 2 * (size + pairs + (byteorder == 0)) */
++ if (size > PY_SSIZE_T_MAX ||
++ size > PY_SSIZE_T_MAX - pairs - (byteorder == 0))
++ return PyErr_NoMemory();
++ nsize = (size + pairs + (byteorder == 0));
++ bytesize = nsize * 2;
++ if (bytesize / 2 != nsize)
++ return PyErr_NoMemory();
++ v = PyString_FromStringAndSize(NULL, bytesize);
+ if (v == NULL)
+ return NULL;
+
+@@ -2042,6 +2058,11 @@
+ char *p;
+
+ static const char *hexdigit = "0123456789abcdef";
++#ifdef Py_UNICODE_WIDE
++ const Py_ssize_t expandsize = 10;
++#else
++ const Py_ssize_t expandsize = 6;
++#endif
+
+ /* Initial allocation is based on the longest-possible unichr
+ escape.
+@@ -2057,13 +2078,12 @@
+ escape.
+ */
+
++ if (size > (PY_SSIZE_T_MAX - 2 - 1) / expandsize)
++ return PyErr_NoMemory();
++
+ repr = PyString_FromStringAndSize(NULL,
+ 2
+-#ifdef Py_UNICODE_WIDE
+- + 10*size
+-#else
+- + 6*size
+-#endif
++ + expandsize*size
+ + 1);
+ if (repr == NULL)
+ return NULL;
+@@ -2304,12 +2324,16 @@
+ char *q;
+
+ static const char *hexdigit = "0123456789abcdef";
+-
+ #ifdef Py_UNICODE_WIDE
+- repr = PyString_FromStringAndSize(NULL, 10 * size);
++ const Py_ssize_t expandsize = 10;
+ #else
+- repr = PyString_FromStringAndSize(NULL, 6 * size);
++ const Py_ssize_t expandsize = 6;
+ #endif
++
++ if (size > PY_SSIZE_T_MAX / expandsize)
++ return PyErr_NoMemory();
++
++ repr = PyString_FromStringAndSize(NULL, expandsize * size);
+ if (repr == NULL)
+ return NULL;
+ if (size == 0)
+@@ -4719,6 +4743,11 @@
+ return self;
+ }
+
++ if (left > PY_SSIZE_T_MAX - self->length ||
++ right > PY_SSIZE_T_MAX - (left + self->length)) {
++ PyErr_SetString(PyExc_OverflowError, "padded string is too long");
++ return NULL;
++ }
+ u = _PyUnicode_New(left + self->length + right);
+ if (u) {
+ if (left)
diff --git a/lang/python25/files/patch-python_mysnprintf.c b/lang/python25/files/patch-python_mysnprintf.c
new file mode 100644
index 000000000000..276dd21a1b31
--- /dev/null
+++ b/lang/python25/files/patch-python_mysnprintf.c
@@ -0,0 +1,55 @@
+--- Python/mysnprintf.c.orig 2001-12-21 16:32:15.000000000 +0000
++++ Python/mysnprintf.c 2008-08-30 10:46:31.000000000 +0100
+@@ -54,18 +54,28 @@
+ PyOS_vsnprintf(char *str, size_t size, const char *format, va_list va)
+ {
+ int len; /* # bytes written, excluding \0 */
+-#ifndef HAVE_SNPRINTF
++#ifdef HAVE_SNPRINTF
++#define _PyOS_vsnprintf_EXTRA_SPACE 1
++#else
++#define _PyOS_vsnprintf_EXTRA_SPACE 512
+ char *buffer;
+ #endif
+ assert(str != NULL);
+ assert(size > 0);
+ assert(format != NULL);
++ /* We take a size_t as input but return an int. Sanity check
++ * our input so that it won't cause an overflow in the
++ * vsnprintf return value or the buffer malloc size. */
++ if (size > INT_MAX - _PyOS_vsnprintf_EXTRA_SPACE) {
++ len = -666;
++ goto Done;
++ }
+
+ #ifdef HAVE_SNPRINTF
+ len = vsnprintf(str, size, format, va);
+ #else
+ /* Emulate it. */
+- buffer = PyMem_MALLOC(size + 512);
++ buffer = PyMem_MALLOC(size + _PyOS_vsnprintf_EXTRA_SPACE);
+ if (buffer == NULL) {
+ len = -666;
+ goto Done;
+@@ -75,7 +85,7 @@
+ if (len < 0)
+ /* ignore the error */;
+
+- else if ((size_t)len >= size + 512)
++ else if ((size_t)len >= size + _PyOS_vsnprintf_EXTRA_SPACE)
+ Py_FatalError("Buffer overflow in PyOS_snprintf/PyOS_vsnprintf");
+
+ else {
+@@ -86,8 +96,10 @@
+ str[to_copy] = '\0';
+ }
+ PyMem_FREE(buffer);
+-Done:
+ #endif
+- str[size-1] = '\0';
++Done:
++ if (size > 0)
++ str[size-1] = '\0';
+ return len;
++#undef _PyOS_vsnprintf_EXTRA_SPACE
+ }
diff --git a/lang/python26/Makefile b/lang/python26/Makefile
index 6224b163004d..fa6cd7bd7061 100644
--- a/lang/python26/Makefile
+++ b/lang/python26/Makefile
@@ -6,7 +6,7 @@
PORTNAME= python25
PORTVERSION= 2.5.2
-PORTREVISION= 2
+PORTREVISION= 3
CATEGORIES= lang python ipv6
MASTER_SITES= ${PYTHON_MASTER_SITES}
MASTER_SITE_SUBDIR= ${PYTHON_MASTER_SITE_SUBDIR}
diff --git a/lang/python26/files/patch-lib-test_test_bigmem.py b/lang/python26/files/patch-lib-test_test_bigmem.py
new file mode 100644
index 000000000000..5ec5935a046f
--- /dev/null
+++ b/lang/python26/files/patch-lib-test_test_bigmem.py
@@ -0,0 +1,163 @@
+--- Lib/test/test_bigmem.py.orig 2007-11-30 21:53:17.000000000 +0000
++++ Lib/test/test_bigmem.py 2008-08-30 10:16:13.000000000 +0100
+@@ -1,5 +1,5 @@
+ from test import test_support
+-from test.test_support import bigmemtest, _1G, _2G
++from test.test_support import bigmemtest, _1G, _2G, _4G, precisionbigmemtest
+
+ import unittest
+ import operator
+@@ -54,6 +54,22 @@
+ self.assertEquals(s[lpadsize:-rpadsize], SUBSTR)
+ self.assertEquals(s.strip(), SUBSTR.strip())
+
++ @precisionbigmemtest(size=_2G - 1, memuse=1)
++ def test_center_unicode(self, size):
++ SUBSTR = u' abc def ghi'
++ try:
++ s = SUBSTR.center(size)
++ except OverflowError:
++ pass # acceptable on 32-bit
++ else:
++ self.assertEquals(len(s), size)
++ lpadsize = rpadsize = (len(s) - len(SUBSTR)) // 2
++ if len(s) % 2:
++ lpadsize += 1
++ self.assertEquals(s[lpadsize:-rpadsize], SUBSTR)
++ self.assertEquals(s.strip(), SUBSTR.strip())
++ del s
++
+ @bigmemtest(minsize=_2G, memuse=2)
+ def test_count(self, size):
+ SUBSTR = ' abc def ghi'
+@@ -70,10 +86,44 @@
+ s = '.' * size
+ self.assertEquals(len(s.decode('utf-8')), size)
+
++ def basic_encode_test(self, size, enc, c=u'.', expectedsize=None):
++ if expectedsize is None:
++ expectedsize = size
++
++ s = c * size
++ self.assertEquals(len(s.encode(enc)), expectedsize)
++
+ @bigmemtest(minsize=_2G + 2, memuse=3)
+ def test_encode(self, size):
+- s = u'.' * size
+- self.assertEquals(len(s.encode('utf-8')), size)
++ return self.basic_encode_test(size, 'utf-8')
++
++ @precisionbigmemtest(size=_4G / 6 + 2, memuse=2)
++ def test_encode_raw_unicode_escape(self, size):
++ try:
++ return self.basic_encode_test(size, 'raw_unicode_escape')
++ except MemoryError:
++ pass # acceptable on 32-bit
++
++ @precisionbigmemtest(size=_4G / 5 + 70, memuse=3)
++ def test_encode_utf7(self, size):
++ try:
++ return self.basic_encode_test(size, 'utf7')
++ except MemoryError:
++ pass # acceptable on 32-bit
++
++ @precisionbigmemtest(size=_2G-1, memuse=2)
++ def test_decodeascii(self, size):
++ return self.basic_encode_test(size, 'ascii', c='A')
++
++ @precisionbigmemtest(size=_4G / 5, memuse=6+2)
++ def test_unicode_repr_oflw(self, size):
++ try:
++ s = u"\uAAAA"*size
++ r = repr(s)
++ except MemoryError:
++ pass # acceptable on 32-bit
++ else:
++ self.failUnless(s == eval(r))
+
+ @bigmemtest(minsize=_2G, memuse=2)
+ def test_endswith(self, size):
+@@ -459,6 +509,11 @@
+ self.assertEquals(s.count('\\'), size)
+ self.assertEquals(s.count('0'), size * 2)
+
++ @bigmemtest(minsize=2**32 / 5, memuse=6+2)
++ def test_unicode_repr(self, size):
++ s = u"\uAAAA" * size
++ self.failUnless(len(repr(s)) > size)
++
+ # This test is meaningful even with size < 2G, as long as the
+ # doubled string is > 2G (but it tests more if both are > 2G :)
+ @bigmemtest(minsize=_1G + 2, memuse=3)
+@@ -642,6 +697,35 @@
+ def test_repeat_large(self, size):
+ return self.basic_test_repeat(size)
+
++ @bigmemtest(minsize=_1G - 1, memuse=12)
++ def test_repeat_large_2(self, size):
++ return self.basic_test_repeat(size)
++
++ @precisionbigmemtest(size=_1G - 1, memuse=9)
++ def test_from_2G_generator(self, size):
++ try:
++ t = tuple(xrange(size))
++ except MemoryError:
++ pass # acceptable on 32-bit
++ else:
++ count = 0
++ for item in t:
++ self.assertEquals(item, count)
++ count += 1
++ self.assertEquals(count, size)
++
++ @precisionbigmemtest(size=_1G - 25, memuse=9)
++ def test_from_almost_2G_generator(self, size):
++ try:
++ t = tuple(xrange(size))
++ count = 0
++ for item in t:
++ self.assertEquals(item, count)
++ count += 1
++ self.assertEquals(count, size)
++ except MemoryError:
++ pass # acceptable, expected on 32-bit
++
+ # Like test_concat, split in two.
+ def basic_test_repr(self, size):
+ t = (0,) * size
+@@ -957,8 +1041,34 @@
+ self.assertEquals(l[:10], [1] * 10)
+ self.assertEquals(l[-10:], [5] * 10)
+
++class BufferTest(unittest.TestCase):
++
++ @precisionbigmemtest(size=_1G, memuse=4)
++ def test_repeat(self, size):
++ try:
++ b = buffer("AAAA")*size
++ except MemoryError:
++ pass # acceptable on 32-bit
++ else:
++ count = 0
++ for c in b:
++ self.assertEquals(c, 'A')
++ count += 1
++ self.assertEquals(count, size*4)
++
+ def test_main():
+- test_support.run_unittest(StrTest, TupleTest, ListTest)
++ test_support.run_unittest(StrTest, TupleTest, ListTest, BufferTest)
++
++# Expected failures (crashers)
++# del StrTest.test_center_unicode
++del StrTest.test_decodeascii
++# del StrTest.test_encode_utf32
++# del StrTest.test_encode_utf7
++# del StrTest.test_encode_raw_unicode_escape
++#
++# del TupleTest.test_from_2G_generator
++#
++# del BufferTest.test_repeat
+
+ if __name__ == '__main__':
+ if len(sys.argv) > 1:
diff --git a/lang/python26/files/patch-lib-test_test_hashlib.py b/lang/python26/files/patch-lib-test_test_hashlib.py
new file mode 100644
index 000000000000..15ede2b17e8c
--- /dev/null
+++ b/lang/python26/files/patch-lib-test_test_hashlib.py
@@ -0,0 +1,41 @@
+--- Lib/test/test_hashlib.py.orig 2005-08-21 19:45:59.000000000 +0100
++++ Lib/test/test_hashlib.py 2008-08-30 10:43:27.000000000 +0100
+@@ -9,7 +9,7 @@
+ import hashlib
+ import unittest
+ from test import test_support
+-
++from test.test_support import _4G, precisionbigmemtest
+
+ def hexstr(s):
+ import string
+@@ -55,7 +55,6 @@
+ m2.update(aas + bees + cees)
+ self.assertEqual(m1.digest(), m2.digest())
+
+-
+ def check(self, name, data, digest):
+ # test the direct constructors
+ computed = getattr(hashlib, name)(data).hexdigest()
+@@ -75,6 +74,21 @@
+ self.check('md5', 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789',
+ 'd174ab98d277d9f5a5611c2c9f419d9f')
+
++ @precisionbigmemtest(size=_4G + 5, memuse=1)
++ def test_case_md5_huge(self, size):
++ if size == _4G + 5:
++ try:
++ self.check('md5', 'A'*size, 'c9af2dff37468ce5dfee8f2cfc0a9c6d')
++ except OverflowError:
++ pass # 32-bit arch
++
++ @precisionbigmemtest(size=_4G - 1, memuse=1)
++ def test_case_md5_uintmax(self, size):
++ if size == _4G - 1:
++ try:
++ self.check('md5', 'A'*size, '28138d306ff1b8281f1a9067e1a1a2b3')
++ except OverflowError:
++ pass # 32-bit arch
+
+ # use the three examples from Federal Information Processing Standards
+ # Publication 180-1, Secure Hash Standard, 1995 April 17
diff --git a/lang/python26/files/patch-lib-test_test_strop.py b/lang/python26/files/patch-lib-test_test_strop.py
new file mode 100644
index 000000000000..f0e40166957a
--- /dev/null
+++ b/lang/python26/files/patch-lib-test_test_strop.py
@@ -0,0 +1,28 @@
+--- Lib/test/test_strop.py.orig 2002-07-31 00:27:12.000000000 +0100
++++ Lib/test/test_strop.py 2008-08-30 10:16:13.000000000 +0100
+@@ -115,6 +115,25 @@
+ strop.uppercase
+ strop.whitespace
+
++ @test_support.precisionbigmemtest(size=test_support._2G - 1, memuse=5)
++ def test_stropjoin_huge_list(self, size):
++ a = "A" * size
++ try:
++ r = strop.join([a, a], a)
++ except OverflowError:
++ pass
++ else:
++ self.assertEquals(len(r), len(a) * 3)
++
++ @test_support.precisionbigmemtest(size=test_support._2G - 1, memuse=1)
++ def test_stropjoin_huge_tup(self, size):
++ a = "A" * size
++ try:
++ r = strop.join((a, a), a)
++ except OverflowError:
++ pass # acceptable on 32-bit
++ else:
++ self.assertEquals(len(r), len(a) * 3)
+
+ transtable = '\000\001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037 !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`xyzdefghijklmnopqrstuvwxyz{|}~\177\200\201\202\203\204\205\206\207\210\211\212\213\214\215\216\217\220\221\222\223\224\225\226\227\230\231\232\233\234\235\236\237\240\241\242\243\244\245\246\247\250\251\252\253\254\255\256\257\260\261\262\263\264\265\266\267\270\271\272\273\274\275\276\277\300\301\302\303\304\305\306\307\310\311\312\313\314\315\316\317\320\321\322\323\324\325\326\327\330\331\332\333\334\335\336\337\340\341\342\343\344\345\346\347\350\351\352\353\354\355\356\357\360\361\362\363\364\365\366\367\370\371\372\373\374\375\376\377'
+
diff --git a/lang/python26/files/patch-lib-test_test_support.py b/lang/python26/files/patch-lib-test_test_support.py
new file mode 100644
index 000000000000..b11933bc7eed
--- /dev/null
+++ b/lang/python26/files/patch-lib-test_test_support.py
@@ -0,0 +1,62 @@
+--- Lib/test/test_support.py.orig 2008-01-27 01:24:44.000000000 +0000
++++ Lib/test/test_support.py 2008-08-30 10:16:13.000000000 +0100
+@@ -33,6 +33,7 @@
+ use_resources = None # Flag set to [] by regrtest.py
+ max_memuse = 0 # Disable bigmem tests (they will still be run with
+ # small sizes, to make sure they work.)
++real_max_memuse = 0
+
+ # _original_stdout is meant to hold stdout at the time regrtest began.
+ # This may be "the real" stdout, or IDLE's emulation of stdout, or whatever.
+@@ -323,6 +324,7 @@
+ _1M = 1024*1024
+ _1G = 1024 * _1M
+ _2G = 2 * _1G
++_4G = 4 * _1G
+
+ # Hack to get at the maximum value an internal index can take.
+ class _Dummy:
+@@ -333,6 +335,7 @@
+ def set_memlimit(limit):
+ import re
+ global max_memuse
++ global real_max_memuse
+ sizes = {
+ 'k': 1024,
+ 'm': _1M,
+@@ -344,6 +347,7 @@
+ if m is None:
+ raise ValueError('Invalid memory limit %r' % (limit,))
+ memlimit = int(float(m.group(1)) * sizes[m.group(3).lower()])
++ real_max_memuse = memlimit
+ if memlimit > MAX_Py_ssize_t:
+ memlimit = MAX_Py_ssize_t
+ if memlimit < _2G - 1:
+@@ -389,6 +393,27 @@
+ return wrapper
+ return decorator
+
++def precisionbigmemtest(size, memuse, overhead=5*_1M):
++ def decorator(f):
++ def wrapper(self):
++ if not real_max_memuse:
++ maxsize = 5147
++ else:
++ maxsize = size
++
++ if real_max_memuse and real_max_memuse < maxsize * memuse:
++ if verbose:
++ sys.stderr.write("Skipping %s because of memory "
++ "constraint\n" % (f.__name__,))
++ return
++
++ return f(self, maxsize)
++ wrapper.size = size
++ wrapper.memuse = memuse
++ wrapper.overhead = overhead
++ return wrapper
++ return decorator
++
+ def bigaddrspacetest(f):
+ """Decorator for tests that fill the address space."""
+ def wrapper(self):
diff --git a/lang/python26/files/patch-lib_seq_tests.py b/lang/python26/files/patch-lib_seq_tests.py
new file mode 100644
index 000000000000..9be35ae82517
--- /dev/null
+++ b/lang/python26/files/patch-lib_seq_tests.py
@@ -0,0 +1,21 @@
+--- Lib/test/seq_tests.py.orig 2007-11-12 20:04:41.000000000 +0000
++++ Lib/test/seq_tests.py 2008-08-30 10:16:13.000000000 +0100
+@@ -307,11 +307,13 @@
+ self.assertEqual(id(s), id(s*1))
+
+ def test_bigrepeat(self):
+- x = self.type2test([0])
+- x *= 2**16
+- self.assertRaises(MemoryError, x.__mul__, 2**16)
+- if hasattr(x, '__imul__'):
+- self.assertRaises(MemoryError, x.__imul__, 2**16)
++ import sys
++ if sys.maxint <= 2147483647:
++ x = self.type2test([0])
++ x *= 2**16
++ self.assertRaises(MemoryError, x.__mul__, 2**16)
++ if hasattr(x, '__imul__'):
++ self.assertRaises(MemoryError, x.__imul__, 2**16)
+
+ def test_subscript(self):
+ a = self.type2test([10, 11])
diff --git a/lang/python26/files/patch-modules_almodule.c b/lang/python26/files/patch-modules_almodule.c
new file mode 100644
index 000000000000..8e3b9b11380d
--- /dev/null
+++ b/lang/python26/files/patch-modules_almodule.c
@@ -0,0 +1,14 @@
+--- Modules/almodule.c.orig 2006-09-25 07:53:42.000000000 +0100
++++ Modules/almodule.c 2008-08-30 10:39:43.000000000 +0100
+@@ -1633,9 +1633,11 @@
+ if (nvals < 0)
+ goto cleanup;
+ if (nvals > setsize) {
++ ALvalue *old_return_set = return_set;
+ setsize = nvals;
+ PyMem_RESIZE(return_set, ALvalue, setsize);
+ if (return_set == NULL) {
++ return_set = old_return_set;
+ PyErr_NoMemory();
+ goto cleanup;
+ }
diff --git a/lang/python26/files/patch-modules_arraymodule.c b/lang/python26/files/patch-modules_arraymodule.c
new file mode 100644
index 000000000000..738ee48f1485
--- /dev/null
+++ b/lang/python26/files/patch-modules_arraymodule.c
@@ -0,0 +1,33 @@
+--- Modules/arraymodule.c.orig 2008-02-15 19:11:46.000000000 +0000
++++ Modules/arraymodule.c 2008-08-30 10:39:43.000000000 +0100
+@@ -816,6 +816,7 @@
+ array_do_extend(arrayobject *self, PyObject *bb)
+ {
+ Py_ssize_t size;
++ char *old_item;
+
+ if (!array_Check(bb))
+ return array_iter_extend(self, bb);
+@@ -831,10 +832,11 @@
+ return -1;
+ }
+ size = self->ob_size + b->ob_size;
++ old_item = self->ob_item;
+ PyMem_RESIZE(self->ob_item, char, size*self->ob_descr->itemsize);
+ if (self->ob_item == NULL) {
+- PyObject_Del(self);
+- PyErr_NoMemory();
++ self->ob_item = old_item;
++ PyErr_NoMemory();
+ return -1;
+ }
+ memcpy(self->ob_item + self->ob_size*self->ob_descr->itemsize,
+@@ -886,7 +888,7 @@
+ if (size > PY_SSIZE_T_MAX / n) {
+ return PyErr_NoMemory();
+ }
+- PyMem_Resize(items, char, n * size);
++ PyMem_RESIZE(items, char, n * size);
+ if (items == NULL)
+ return PyErr_NoMemory();
+ p = items;
diff --git a/lang/python26/files/patch-modules_gcmodule.c b/lang/python26/files/patch-modules_gcmodule.c
new file mode 100644
index 000000000000..59253930e9c3
--- /dev/null
+++ b/lang/python26/files/patch-modules_gcmodule.c
@@ -0,0 +1,58 @@
+--- Include/pymem.h.orig 2008-02-14 11:26:18.000000000 +0000
++++ Include/pymem.h 2008-08-30 10:39:43.000000000 +0100
+@@ -67,8 +67,12 @@
+ for malloc(0), which would be treated as an error. Some platforms
+ would return a pointer with no memory behind it, which would break
+ pymalloc. To solve these problems, allocate an extra byte. */
+-#define PyMem_MALLOC(n) malloc((n) ? (n) : 1)
+-#define PyMem_REALLOC(p, n) realloc((p), (n) ? (n) : 1)
++/* Returns NULL to indicate error if a negative size or size larger than
++ Py_ssize_t can represent is supplied. Helps prevents security holes. */
++#define PyMem_MALLOC(n) (((n) < 0 || (n) > PY_SSIZE_T_MAX) ? NULL \
++ : malloc((n) ? (n) : 1))
++#define PyMem_REALLOC(p, n) (((n) < 0 || (n) > PY_SSIZE_T_MAX) ? NULL \
++ : realloc((p), (n) ? (n) : 1))
+ #define PyMem_FREE free
+
+ #endif /* PYMALLOC_DEBUG */
+@@ -77,24 +81,31 @@
+ * Type-oriented memory interface
+ * ==============================
+ *
+- * These are carried along for historical reasons. There's rarely a good
+- * reason to use them anymore (you can just as easily do the multiply and
+- * cast yourself).
++ * Allocate memory for n objects of the given type. Returns a new pointer
++ * or NULL if the request was too large or memory allocation failed. Use
++ * these macros rather than doing the multiplication yourself so that proper
++ * overflow checking is always done.
+ */
+
+ #define PyMem_New(type, n) \
+- ( assert((n) <= PY_SIZE_MAX / sizeof(type)) , \
++ ( ((n) > PY_SSIZE_T_MAX / sizeof(type)) ? NULL : \
+ ( (type *) PyMem_Malloc((n) * sizeof(type)) ) )
+ #define PyMem_NEW(type, n) \
+- ( assert((n) <= PY_SIZE_MAX / sizeof(type)) , \
++ ( ((n) > PY_SSIZE_T_MAX / sizeof(type)) ? NULL : \
+ ( (type *) PyMem_MALLOC((n) * sizeof(type)) ) )
+
++/*
++ * The value of (p) is always clobbered by this macro regardless of success.
++ * The caller MUST check if (p) is NULL afterwards and deal with the memory
++ * error if so. This means the original value of (p) MUST be saved for the
++ * caller's memory error handler to not lose track of it.
++ */
+ #define PyMem_Resize(p, type, n) \
+- ( assert((n) <= PY_SIZE_MAX / sizeof(type)) , \
+- ( (p) = (type *) PyMem_Realloc((p), (n) * sizeof(type)) ) )
++ ( (p) = ((n) > PY_SSIZE_T_MAX / sizeof(type)) ? NULL : \
++ (type *) PyMem_Realloc((p), (n) * sizeof(type)) )
+ #define PyMem_RESIZE(p, type, n) \
+- ( assert((n) <= PY_SIZE_MAX / sizeof(type)) , \
+- ( (p) = (type *) PyMem_REALLOC((p), (n) * sizeof(type)) ) )
++ ( (p) = ((n) > PY_SSIZE_T_MAX / sizeof(type)) ? NULL : \
++ (type *) PyMem_REALLOC((p), (n) * sizeof(type)) )
+
+ /* PyMem{Del,DEL} are left over from ancient days, and shouldn't be used
+ * anymore. They're just confusing aliases for PyMem_{Free,FREE} now.
diff --git a/lang/python26/files/patch-modules_hashopenssl.c b/lang/python26/files/patch-modules_hashopenssl.c
new file mode 100644
index 000000000000..1f0cd56a244a
--- /dev/null
+++ b/lang/python26/files/patch-modules_hashopenssl.c
@@ -0,0 +1,104 @@
+--- Modules/_hashopenssl.c.orig 2006-05-29 22:04:52.000000000 +0100
++++ Modules/_hashopenssl.c 2008-08-30 10:43:27.000000000 +0100
+@@ -19,6 +19,8 @@
+ /* EVP is the preferred interface to hashing in OpenSSL */
+ #include <openssl/evp.h>
+
++#define MUNCH_SIZE INT_MAX
++
+
+ #ifndef HASH_OBJ_CONSTRUCTOR
+ #define HASH_OBJ_CONSTRUCTOR 0
+@@ -164,9 +166,18 @@
+ if (!PyArg_ParseTuple(args, "s#:update", &cp, &len))
+ return NULL;
+
++ if (len > 0 && len <= MUNCH_SIZE) {
+ EVP_DigestUpdate(&self->ctx, cp, Py_SAFE_DOWNCAST(len, Py_ssize_t,
+ unsigned int));
+-
++ } else {
++ Py_ssize_t offset = 0;
++ while (len) {
++ unsigned int process = len > MUNCH_SIZE ? MUNCH_SIZE : len;
++ EVP_DigestUpdate(&self->ctx, cp + offset, process);
++ len -= process;
++ offset += process;
++ }
++ }
+ Py_INCREF(Py_None);
+ return Py_None;
+ }
+@@ -255,9 +266,20 @@
+ self->name = name_obj;
+ Py_INCREF(self->name);
+
+- if (cp && len)
++ if (cp && len) {
++ if (len > 0 && len <= MUNCH_SIZE) {
+ EVP_DigestUpdate(&self->ctx, cp, Py_SAFE_DOWNCAST(len, Py_ssize_t,
+ unsigned int));
++ } else {
++ Py_ssize_t offset = 0;
++ while (len) {
++ unsigned int process = len > MUNCH_SIZE ? MUNCH_SIZE : len;
++ EVP_DigestUpdate(&self->ctx, cp + offset, process);
++ len -= process;
++ offset += process;
++ }
++ }
++ }
+
+ return 0;
+ }
+@@ -328,7 +350,7 @@
+ static PyObject *
+ EVPnew(PyObject *name_obj,
+ const EVP_MD *digest, const EVP_MD_CTX *initial_ctx,
+- const unsigned char *cp, unsigned int len)
++ const unsigned char *cp, Py_ssize_t len)
+ {
+ EVPobject *self;
+
+@@ -346,8 +368,20 @@
+ EVP_DigestInit(&self->ctx, digest);
+ }
+
+- if (cp && len)
+- EVP_DigestUpdate(&self->ctx, cp, len);
++ if (cp && len) {
++ if (len > 0 && len <= MUNCH_SIZE) {
++ EVP_DigestUpdate(&self->ctx, cp, Py_SAFE_DOWNCAST(len, Py_ssize_t,
++ unsigned int));
++ } else {
++ Py_ssize_t offset = 0;
++ while (len) {
++ unsigned int process = len > MUNCH_SIZE ? MUNCH_SIZE : len;
++ EVP_DigestUpdate(&self->ctx, cp + offset, process);
++ len -= process;
++ offset += process;
++ }
++ }
++ }
+
+ return (PyObject *)self;
+ }
+@@ -384,8 +418,7 @@
+
+ digest = EVP_get_digestbyname(name);
+
+- return EVPnew(name_obj, digest, NULL, cp, Py_SAFE_DOWNCAST(len, Py_ssize_t,
+- unsigned int));
++ return EVPnew(name_obj, digest, NULL, cp, len);
+ }
+
+ /*
+@@ -410,7 +443,7 @@
+ CONST_ ## NAME ## _name_obj, \
+ NULL, \
+ CONST_new_ ## NAME ## _ctx_p, \
+- cp, Py_SAFE_DOWNCAST(len, Py_ssize_t, unsigned int)); \
++ cp, len); \
+ }
+
+ /* a PyMethodDef structure for the constructor */
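
The _hashopenssl.c hunks above stop narrowing a Py_ssize_t length straight into the unsigned int that EVP_DigestUpdate() expects; anything above MUNCH_SIZE (INT_MAX) is fed to the digest in chunks instead, so the length can no longer be truncated to a smaller value. A rough standalone sketch of the chunking loop (illustrative only; hash_update() is a hypothetical stand-in for EVP_DigestUpdate(), not an OpenSSL call):

    #include <limits.h>
    #include <stddef.h>

    /* Hypothetical digest routine that, like EVP_DigestUpdate() here,
       only accepts an unsigned int length. */
    static unsigned long long total_fed;
    static void hash_update(const unsigned char *data, unsigned int len)
    {
        (void)data;
        total_fed += len;
    }

    /* Feed an arbitrarily large buffer in chunks that always fit. */
    static void update_all(const unsigned char *data, size_t len)
    {
        while (len > 0) {
            unsigned int chunk = len > INT_MAX ? INT_MAX : (unsigned int)len;
            hash_update(data, chunk);
            data += chunk;
            len -= chunk;
        }
    }
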
diff --git a/lang/python26/files/patch-modules_mmapmodule.c b/lang/python26/files/patch-modules_mmapmodule.c
new file mode 100644
index 000000000000..60f3d71ff349
--- /dev/null
+++ b/lang/python26/files/patch-modules_mmapmodule.c
@@ -0,0 +1,11 @@
+--- Modules/mmapmodule.c.orig 2006-08-22 14:57:07.000000000 +0100
++++ Modules/mmapmodule.c 2008-08-30 10:16:13.000000000 +0100
+@@ -223,7 +223,7 @@
+ return(NULL);
+
+ /* silently 'adjust' out-of-range requests */
+- if ((self->pos + num_bytes) > self->size) {
++ if (num_bytes > self->size - self->pos) {
+ num_bytes -= (self->pos+num_bytes) - self->size;
+ }
+ result = Py_BuildValue("s#", self->data+self->pos, num_bytes);
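
The mmapmodule.c change above rewrites the bounds test so the addition cannot overflow: because pos <= size always holds for a mapped region, "num_bytes > size - pos" is equivalent to "pos + num_bytes > size" but never wraps. A tiny illustration of the same clamp (hypothetical names, not the patched function):

    #include <stddef.h>

    /* Clamp a read request to the bytes remaining after 'pos'.
       Assumes pos <= size, as the mmap object guarantees. */
    static size_t clamp_request(size_t pos, size_t size, size_t nbytes)
    {
        if (nbytes > size - pos)
            nbytes = size - pos;
        return nbytes;
    }
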
diff --git a/lang/python26/files/patch-modules_selectmodule.c b/lang/python26/files/patch-modules_selectmodule.c
new file mode 100644
index 000000000000..446241f05a97
--- /dev/null
+++ b/lang/python26/files/patch-modules_selectmodule.c
@@ -0,0 +1,16 @@
+--- Modules/selectmodule.c.orig 2006-07-10 02:18:57.000000000 +0100
++++ Modules/selectmodule.c 2008-08-30 10:39:43.000000000 +0100
+@@ -349,10 +349,12 @@
+ {
+ Py_ssize_t i, pos;
+ PyObject *key, *value;
++ struct pollfd *old_ufds = self->ufds;
+
+ self->ufd_len = PyDict_Size(self->dict);
+- PyMem_Resize(self->ufds, struct pollfd, self->ufd_len);
++ PyMem_RESIZE(self->ufds, struct pollfd, self->ufd_len);
+ if (self->ufds == NULL) {
++ self->ufds = old_ufds;
+ PyErr_NoMemory();
+ return 0;
+ }
diff --git a/lang/python26/files/patch-modules_stropmodule.c b/lang/python26/files/patch-modules_stropmodule.c
new file mode 100644
index 000000000000..d7f42ce18871
--- /dev/null
+++ b/lang/python26/files/patch-modules_stropmodule.c
@@ -0,0 +1,31 @@
+--- Modules/stropmodule.c.orig 2008-02-14 11:26:18.000000000 +0000
++++ Modules/stropmodule.c 2008-08-30 10:16:13.000000000 +0100
+@@ -216,6 +216,13 @@
+ return NULL;
+ }
+ slen = PyString_GET_SIZE(item);
++ if (slen > PY_SSIZE_T_MAX - reslen ||
++ seplen > PY_SSIZE_T_MAX - reslen - seplen) {
++ PyErr_SetString(PyExc_OverflowError,
++ "input too long");
++ Py_DECREF(res);
++ return NULL;
++ }
+ while (reslen + slen + seplen >= sz) {
+ if (_PyString_Resize(&res, sz * 2) < 0)
+ return NULL;
+@@ -253,6 +260,14 @@
+ return NULL;
+ }
+ slen = PyString_GET_SIZE(item);
++ if (slen > PY_SSIZE_T_MAX - reslen ||
++ seplen > PY_SSIZE_T_MAX - reslen - seplen) {
++ PyErr_SetString(PyExc_OverflowError,
++ "input too long");
++ Py_DECREF(res);
++ Py_XDECREF(item);
++ return NULL;
++ }
+ while (reslen + slen + seplen >= sz) {
+ if (_PyString_Resize(&res, sz * 2) < 0) {
+ Py_DECREF(item);
diff --git a/lang/python26/files/patch-objects_bufferobject.c b/lang/python26/files/patch-objects_bufferobject.c
new file mode 100644
index 000000000000..16e99568ad9a
--- /dev/null
+++ b/lang/python26/files/patch-objects_bufferobject.c
@@ -0,0 +1,13 @@
+--- Objects/bufferobject.c.orig 2008-02-14 11:26:18.000000000 +0000
++++ Objects/bufferobject.c 2008-08-30 10:16:13.000000000 +0100
+@@ -427,6 +427,10 @@
+ count = 0;
+ if (!get_buf(self, &ptr, &size, ANY_BUFFER))
+ return NULL;
++ if (count > PY_SSIZE_T_MAX / size) {
++ PyErr_SetString(PyExc_MemoryError, "result too large");
++ return NULL;
++ }
+ ob = PyString_FromStringAndSize(NULL, size * count);
+ if ( ob == NULL )
+ return NULL;
diff --git a/lang/python26/files/patch-objects_longobject.c b/lang/python26/files/patch-objects_longobject.c
new file mode 100644
index 000000000000..1221db9fed0a
--- /dev/null
+++ b/lang/python26/files/patch-objects_longobject.c
@@ -0,0 +1,11 @@
+--- Objects/longobject.c.orig 2007-05-07 19:30:48.000000000 +0100
++++ Objects/longobject.c 2008-08-30 10:16:13.000000000 +0100
+@@ -70,6 +70,8 @@
+ PyErr_NoMemory();
+ return NULL;
+ }
++ /* XXX(nnorwitz): This can overflow --
++ PyObject_NEW_VAR / _PyObject_VAR_SIZE need to detect overflow */
+ return PyObject_NEW_VAR(PyLongObject, &PyLong_Type, size);
+ }
+
diff --git a/lang/python26/files/patch-objects_obmalloc.c b/lang/python26/files/patch-objects_obmalloc.c
new file mode 100644
index 000000000000..27050596fbfc
--- /dev/null
+++ b/lang/python26/files/patch-objects_obmalloc.c
@@ -0,0 +1,34 @@
+--- Objects/obmalloc.c.orig 2008-02-14 11:26:18.000000000 +0000
++++ Objects/obmalloc.c 2008-08-30 10:39:43.000000000 +0100
+@@ -727,6 +727,15 @@
+ uint size;
+
+ /*
++ * Limit ourselves to PY_SSIZE_T_MAX bytes to prevent security holes.
++ * Most python internals blindly use a signed Py_ssize_t to track
++ * things without checking for overflows or negatives.
++ * As size_t is unsigned, checking for nbytes < 0 is not required.
++ */
++ if (nbytes > PY_SSIZE_T_MAX)
++ return NULL;
++
++ /*
+ * This implicitly redirects malloc(0).
+ */
+ if ((nbytes - 1) < SMALL_REQUEST_THRESHOLD) {
+@@ -1130,6 +1139,15 @@
+ if (p == NULL)
+ return PyObject_Malloc(nbytes);
+
++ /*
++ * Limit ourselves to PY_SSIZE_T_MAX bytes to prevent security holes.
++ * Most python internals blindly use a signed Py_ssize_t to track
++ * things without checking for overflows or negatives.
++ * As size_t is unsigned, checking for nbytes < 0 is not required.
++ */
++ if (nbytes > PY_SSIZE_T_MAX)
++ return NULL;
++
+ pool = POOL_ADDR(p);
+ if (Py_ADDRESS_IN_RANGE(p, pool)) {
+ /* We're in charge of this block */
diff --git a/lang/python26/files/patch-objects_stringobject.c b/lang/python26/files/patch-objects_stringobject.c
new file mode 100644
index 000000000000..af55c78a4928
--- /dev/null
+++ b/lang/python26/files/patch-objects_stringobject.c
@@ -0,0 +1,49 @@
+--- Objects/stringobject.c.orig 2007-11-07 01:19:49.000000000 +0000
++++ Objects/stringobject.c 2008-08-30 10:16:13.000000000 +0100
+@@ -71,6 +71,11 @@
+ return (PyObject *)op;
+ }
+
++ if (size > PY_SSIZE_T_MAX - sizeof(PyStringObject)) {
++ PyErr_SetString(PyExc_OverflowError, "string is too large");
++ return NULL;
++ }
++
+ /* Inline PyObject_NewVar */
+ op = (PyStringObject *)PyObject_MALLOC(sizeof(PyStringObject) + size);
+ if (op == NULL)
+@@ -106,7 +111,7 @@
+
+ assert(str != NULL);
+ size = strlen(str);
+- if (size > PY_SSIZE_T_MAX) {
++ if (size > PY_SSIZE_T_MAX - sizeof(PyStringObject)) {
+ PyErr_SetString(PyExc_OverflowError,
+ "string is too long for a Python string");
+ return NULL;
+@@ -967,14 +972,24 @@
+ Py_INCREF(a);
+ return (PyObject *)a;
+ }
++ /* Check that string sizes are not negative, to prevent an
++ overflow in cases where we are passed incorrectly-created
++ strings with negative lengths (due to a bug in other code).
++ */
+ size = a->ob_size + b->ob_size;
+- if (size < 0) {
++ if (a->ob_size < 0 || b->ob_size < 0 ||
++ a->ob_size > PY_SSIZE_T_MAX - b->ob_size) {
+ PyErr_SetString(PyExc_OverflowError,
+ "strings are too large to concat");
+ return NULL;
+ }
+
+ /* Inline PyObject_NewVar */
++ if (size > PY_SSIZE_T_MAX - sizeof(PyStringObject)) {
++ PyErr_SetString(PyExc_OverflowError,
++ "strings are too large to concat");
++ return NULL;
++ }
+ op = (PyStringObject *)PyObject_MALLOC(sizeof(PyStringObject) + size);
+ if (op == NULL)
+ return PyErr_NoMemory();
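
The stringobject.c hunks above compare the payload size against PY_SSIZE_T_MAX - sizeof(PyStringObject) before computing header + payload, so the allocation size cannot wrap around. A generic sketch of that header-plus-payload check (illustrative only; struct blob and blob_new are hypothetical stand-ins, and it uses size_t/SIZE_MAX rather than the signed Py_ssize_t limit the patch uses):

    #include <stdint.h>
    #include <stdlib.h>

    /* Hypothetical variable-sized object: a fixed header plus 'len' bytes. */
    struct blob {
        size_t len;
        char   data[1];
    };

    static struct blob *blob_new(size_t len)
    {
        if (len > SIZE_MAX - sizeof(struct blob))   /* header + len would wrap */
            return NULL;
        return malloc(sizeof(struct blob) + len);
    }
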
diff --git a/lang/python26/files/patch-objects_tupleobject.c b/lang/python26/files/patch-objects_tupleobject.c
new file mode 100644
index 000000000000..eb133b6e002c
--- /dev/null
+++ b/lang/python26/files/patch-objects_tupleobject.c
@@ -0,0 +1,17 @@
+--- Objects/tupleobject.c.orig 2006-08-12 18:03:09.000000000 +0100
++++ Objects/tupleobject.c 2008-08-30 10:16:13.000000000 +0100
+@@ -60,11 +60,12 @@
+ Py_ssize_t nbytes = size * sizeof(PyObject *);
+ /* Check for overflow */
+ if (nbytes / sizeof(PyObject *) != (size_t)size ||
+- (nbytes += sizeof(PyTupleObject) - sizeof(PyObject *))
+- <= 0)
++ (nbytes > PY_SSIZE_T_MAX - sizeof(PyTupleObject) - sizeof(PyObject *)))
+ {
+ return PyErr_NoMemory();
+ }
++ nbytes += sizeof(PyTupleObject) - sizeof(PyObject *);
++
+ op = PyObject_GC_NewVar(PyTupleObject, &PyTuple_Type, size);
+ if (op == NULL)
+ return NULL;
diff --git a/lang/python26/files/patch-objects_unicodeobject.c b/lang/python26/files/patch-objects_unicodeobject.c
new file mode 100644
index 000000000000..85e88caae0e3
--- /dev/null
+++ b/lang/python26/files/patch-objects_unicodeobject.c
@@ -0,0 +1,115 @@
+--- Objects/unicodeobject.c.orig 2007-11-02 22:46:38.000000000 +0000
++++ Objects/unicodeobject.c 2008-08-30 10:16:13.000000000 +0100
+@@ -239,6 +239,11 @@
+ return unicode_empty;
+ }
+
++ /* Ensure we won't overflow the size. */
++ if (length > ((PY_SSIZE_T_MAX / sizeof(Py_UNICODE)) - 1)) {
++ return (PyUnicodeObject *)PyErr_NoMemory();
++ }
++
+ /* Unicode freelist & memory allocation */
+ if (unicode_freelist) {
+ unicode = unicode_freelist;
+@@ -1091,6 +1096,9 @@
+ char * out;
+ char * start;
+
++ if (cbAllocated / 5 != size)
++ return PyErr_NoMemory();
++
+ if (size == 0)
+ return PyString_FromStringAndSize(NULL, 0);
+
+@@ -1689,8 +1697,9 @@
+ {
+ PyObject *v;
+ unsigned char *p;
++ Py_ssize_t nsize, bytesize;
+ #ifdef Py_UNICODE_WIDE
+- int i, pairs;
++ Py_ssize_t i, pairs;
+ #else
+ const int pairs = 0;
+ #endif
+@@ -1713,8 +1722,15 @@
+ if (s[i] >= 0x10000)
+ pairs++;
+ #endif
+- v = PyString_FromStringAndSize(NULL,
+- 2 * (size + pairs + (byteorder == 0)));
++ /* 2 * (size + pairs + (byteorder == 0)) */
++ if (size > PY_SSIZE_T_MAX ||
++ size > PY_SSIZE_T_MAX - pairs - (byteorder == 0))
++ return PyErr_NoMemory();
++ nsize = (size + pairs + (byteorder == 0));
++ bytesize = nsize * 2;
++ if (bytesize / 2 != nsize)
++ return PyErr_NoMemory();
++ v = PyString_FromStringAndSize(NULL, bytesize);
+ if (v == NULL)
+ return NULL;
+
+@@ -2042,6 +2058,11 @@
+ char *p;
+
+ static const char *hexdigit = "0123456789abcdef";
++#ifdef Py_UNICODE_WIDE
++ const Py_ssize_t expandsize = 10;
++#else
++ const Py_ssize_t expandsize = 6;
++#endif
+
+ /* Initial allocation is based on the longest-possible unichr
+ escape.
+@@ -2057,13 +2078,12 @@
+ escape.
+ */
+
++ if (size > (PY_SSIZE_T_MAX - 2 - 1) / expandsize)
++ return PyErr_NoMemory();
++
+ repr = PyString_FromStringAndSize(NULL,
+ 2
+-#ifdef Py_UNICODE_WIDE
+- + 10*size
+-#else
+- + 6*size
+-#endif
++ + expandsize*size
+ + 1);
+ if (repr == NULL)
+ return NULL;
+@@ -2304,12 +2324,16 @@
+ char *q;
+
+ static const char *hexdigit = "0123456789abcdef";
+-
+ #ifdef Py_UNICODE_WIDE
+- repr = PyString_FromStringAndSize(NULL, 10 * size);
++ const Py_ssize_t expandsize = 10;
+ #else
+- repr = PyString_FromStringAndSize(NULL, 6 * size);
++ const Py_ssize_t expandsize = 6;
+ #endif
++
++ if (size > PY_SSIZE_T_MAX / expandsize)
++ return PyErr_NoMemory();
++
++ repr = PyString_FromStringAndSize(NULL, expandsize * size);
+ if (repr == NULL)
+ return NULL;
+ if (size == 0)
+@@ -4719,6 +4743,11 @@
+ return self;
+ }
+
++ if (left > PY_SSIZE_T_MAX - self->length ||
++ right > PY_SSIZE_T_MAX - (left + self->length)) {
++ PyErr_SetString(PyExc_OverflowError, "padded string is too long");
++ return NULL;
++ }
+ u = _PyUnicode_New(left + self->length + right);
+ if (u) {
+ if (left)
diff --git a/lang/python26/files/patch-python_mysnprintf.c b/lang/python26/files/patch-python_mysnprintf.c
new file mode 100644
index 000000000000..276dd21a1b31
--- /dev/null
+++ b/lang/python26/files/patch-python_mysnprintf.c
@@ -0,0 +1,55 @@
+--- Python/mysnprintf.c.orig 2001-12-21 16:32:15.000000000 +0000
++++ Python/mysnprintf.c 2008-08-30 10:46:31.000000000 +0100
+@@ -54,18 +54,28 @@
+ PyOS_vsnprintf(char *str, size_t size, const char *format, va_list va)
+ {
+ int len; /* # bytes written, excluding \0 */
+-#ifndef HAVE_SNPRINTF
++#ifdef HAVE_SNPRINTF
++#define _PyOS_vsnprintf_EXTRA_SPACE 1
++#else
++#define _PyOS_vsnprintf_EXTRA_SPACE 512
+ char *buffer;
+ #endif
+ assert(str != NULL);
+ assert(size > 0);
+ assert(format != NULL);
++ /* We take a size_t as input but return an int. Sanity check
++ * our input so that it won't cause an overflow in the
++ * vsnprintf return value or the buffer malloc size. */
++ if (size > INT_MAX - _PyOS_vsnprintf_EXTRA_SPACE) {
++ len = -666;
++ goto Done;
++ }
+
+ #ifdef HAVE_SNPRINTF
+ len = vsnprintf(str, size, format, va);
+ #else
+ /* Emulate it. */
+- buffer = PyMem_MALLOC(size + 512);
++ buffer = PyMem_MALLOC(size + _PyOS_vsnprintf_EXTRA_SPACE);
+ if (buffer == NULL) {
+ len = -666;
+ goto Done;
+@@ -75,7 +85,7 @@
+ if (len < 0)
+ /* ignore the error */;
+
+- else if ((size_t)len >= size + 512)
++ else if ((size_t)len >= size + _PyOS_vsnprintf_EXTRA_SPACE)
+ Py_FatalError("Buffer overflow in PyOS_snprintf/PyOS_vsnprintf");
+
+ else {
+@@ -86,8 +96,10 @@
+ str[to_copy] = '\0';
+ }
+ PyMem_FREE(buffer);
+-Done:
+ #endif
+- str[size-1] = '\0';
++Done:
++ if (size > 0)
++ str[size-1] = '\0';
+ return len;
++#undef _PyOS_vsnprintf_EXTRA_SPACE
+ }
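
The mysnprintf.c patch above bounds the size_t argument so neither the int return value nor the "size + 512" emulation buffer can overflow, and it only writes the trailing NUL when size > 0. A condensed standalone sketch of the same guard (safe_vsnprintf is a hypothetical wrapper, not the patched PyOS_vsnprintf):

    #include <limits.h>
    #include <stdarg.h>
    #include <stdio.h>

    static int safe_vsnprintf(char *str, size_t size, const char *fmt, va_list va)
    {
        int len = -666;                   /* same error sentinel the patch uses */
        if (size > INT_MAX - 1)           /* would overflow the int result */
            goto done;
        len = vsnprintf(str, size, fmt, va);
    done:
        if (size > 0)
            str[size - 1] = '\0';         /* terminate even on the error path */
        return len;
    }
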
diff --git a/lang/python30/Makefile b/lang/python30/Makefile
index 6224b163004d..fa6cd7bd7061 100644
--- a/lang/python30/Makefile
+++ b/lang/python30/Makefile
@@ -6,7 +6,7 @@
PORTNAME= python25
PORTVERSION= 2.5.2
-PORTREVISION= 2
+PORTREVISION= 3
CATEGORIES= lang python ipv6
MASTER_SITES= ${PYTHON_MASTER_SITES}
MASTER_SITE_SUBDIR= ${PYTHON_MASTER_SITE_SUBDIR}
diff --git a/lang/python30/files/patch-lib-test_test_bigmem.py b/lang/python30/files/patch-lib-test_test_bigmem.py
new file mode 100644
index 000000000000..5ec5935a046f
--- /dev/null
+++ b/lang/python30/files/patch-lib-test_test_bigmem.py
@@ -0,0 +1,163 @@
+--- Lib/test/test_bigmem.py.orig 2007-11-30 21:53:17.000000000 +0000
++++ Lib/test/test_bigmem.py 2008-08-30 10:16:13.000000000 +0100
+@@ -1,5 +1,5 @@
+ from test import test_support
+-from test.test_support import bigmemtest, _1G, _2G
++from test.test_support import bigmemtest, _1G, _2G, _4G, precisionbigmemtest
+
+ import unittest
+ import operator
+@@ -54,6 +54,22 @@
+ self.assertEquals(s[lpadsize:-rpadsize], SUBSTR)
+ self.assertEquals(s.strip(), SUBSTR.strip())
+
++ @precisionbigmemtest(size=_2G - 1, memuse=1)
++ def test_center_unicode(self, size):
++ SUBSTR = u' abc def ghi'
++ try:
++ s = SUBSTR.center(size)
++ except OverflowError:
++ pass # acceptable on 32-bit
++ else:
++ self.assertEquals(len(s), size)
++ lpadsize = rpadsize = (len(s) - len(SUBSTR)) // 2
++ if len(s) % 2:
++ lpadsize += 1
++ self.assertEquals(s[lpadsize:-rpadsize], SUBSTR)
++ self.assertEquals(s.strip(), SUBSTR.strip())
++ del s
++
+ @bigmemtest(minsize=_2G, memuse=2)
+ def test_count(self, size):
+ SUBSTR = ' abc def ghi'
+@@ -70,10 +86,44 @@
+ s = '.' * size
+ self.assertEquals(len(s.decode('utf-8')), size)
+
++ def basic_encode_test(self, size, enc, c=u'.', expectedsize=None):
++ if expectedsize is None:
++ expectedsize = size
++
++ s = c * size
++ self.assertEquals(len(s.encode(enc)), expectedsize)
++
+ @bigmemtest(minsize=_2G + 2, memuse=3)
+ def test_encode(self, size):
+- s = u'.' * size
+- self.assertEquals(len(s.encode('utf-8')), size)
++ return self.basic_encode_test(size, 'utf-8')
++
++ @precisionbigmemtest(size=_4G / 6 + 2, memuse=2)
++ def test_encode_raw_unicode_escape(self, size):
++ try:
++ return self.basic_encode_test(size, 'raw_unicode_escape')
++ except MemoryError:
++ pass # acceptable on 32-bit
++
++ @precisionbigmemtest(size=_4G / 5 + 70, memuse=3)
++ def test_encode_utf7(self, size):
++ try:
++ return self.basic_encode_test(size, 'utf7')
++ except MemoryError:
++ pass # acceptable on 32-bit
++
++ @precisionbigmemtest(size=_2G-1, memuse=2)
++ def test_decodeascii(self, size):
++ return self.basic_encode_test(size, 'ascii', c='A')
++
++ @precisionbigmemtest(size=_4G / 5, memuse=6+2)
++ def test_unicode_repr_oflw(self, size):
++ try:
++ s = u"\uAAAA"*size
++ r = repr(s)
++ except MemoryError:
++ pass # acceptable on 32-bit
++ else:
++ self.failUnless(s == eval(r))
+
+ @bigmemtest(minsize=_2G, memuse=2)
+ def test_endswith(self, size):
+@@ -459,6 +509,11 @@
+ self.assertEquals(s.count('\\'), size)
+ self.assertEquals(s.count('0'), size * 2)
+
++ @bigmemtest(minsize=2**32 / 5, memuse=6+2)
++ def test_unicode_repr(self, size):
++ s = u"\uAAAA" * size
++ self.failUnless(len(repr(s)) > size)
++
+ # This test is meaningful even with size < 2G, as long as the
+ # doubled string is > 2G (but it tests more if both are > 2G :)
+ @bigmemtest(minsize=_1G + 2, memuse=3)
+@@ -642,6 +697,35 @@
+ def test_repeat_large(self, size):
+ return self.basic_test_repeat(size)
+
++ @bigmemtest(minsize=_1G - 1, memuse=12)
++ def test_repeat_large_2(self, size):
++ return self.basic_test_repeat(size)
++
++ @precisionbigmemtest(size=_1G - 1, memuse=9)
++ def test_from_2G_generator(self, size):
++ try:
++ t = tuple(xrange(size))
++ except MemoryError:
++ pass # acceptable on 32-bit
++ else:
++ count = 0
++ for item in t:
++ self.assertEquals(item, count)
++ count += 1
++ self.assertEquals(count, size)
++
++ @precisionbigmemtest(size=_1G - 25, memuse=9)
++ def test_from_almost_2G_generator(self, size):
++ try:
++ t = tuple(xrange(size))
++ count = 0
++ for item in t:
++ self.assertEquals(item, count)
++ count += 1
++ self.assertEquals(count, size)
++ except MemoryError:
++ pass # acceptable, expected on 32-bit
++
+ # Like test_concat, split in two.
+ def basic_test_repr(self, size):
+ t = (0,) * size
+@@ -957,8 +1041,34 @@
+ self.assertEquals(l[:10], [1] * 10)
+ self.assertEquals(l[-10:], [5] * 10)
+
++class BufferTest(unittest.TestCase):
++
++ @precisionbigmemtest(size=_1G, memuse=4)
++ def test_repeat(self, size):
++ try:
++ b = buffer("AAAA")*size
++ except MemoryError:
++ pass # acceptable on 32-bit
++ else:
++ count = 0
++ for c in b:
++ self.assertEquals(c, 'A')
++ count += 1
++ self.assertEquals(count, size*4)
++
+ def test_main():
+- test_support.run_unittest(StrTest, TupleTest, ListTest)
++ test_support.run_unittest(StrTest, TupleTest, ListTest, BufferTest)
++
++# Expected failures (crashers)
++# del StrTest.test_center_unicode
++del StrTest.test_decodeascii
++# del StrTest.test_encode_utf32
++# del StrTest.test_encode_utf7
++# del StrTest.test_encode_raw_unicode_escape
++#
++# del TupleTest.test_from_2G_generator
++#
++# del BufferTest.test_repeat
+
+ if __name__ == '__main__':
+ if len(sys.argv) > 1:
diff --git a/lang/python30/files/patch-lib-test_test_hashlib.py b/lang/python30/files/patch-lib-test_test_hashlib.py
new file mode 100644
index 000000000000..15ede2b17e8c
--- /dev/null
+++ b/lang/python30/files/patch-lib-test_test_hashlib.py
@@ -0,0 +1,41 @@
+--- Lib/test/test_hashlib.py.orig 2005-08-21 19:45:59.000000000 +0100
++++ Lib/test/test_hashlib.py 2008-08-30 10:43:27.000000000 +0100
+@@ -9,7 +9,7 @@
+ import hashlib
+ import unittest
+ from test import test_support
+-
++from test.test_support import _4G, precisionbigmemtest
+
+ def hexstr(s):
+ import string
+@@ -55,7 +55,6 @@
+ m2.update(aas + bees + cees)
+ self.assertEqual(m1.digest(), m2.digest())
+
+-
+ def check(self, name, data, digest):
+ # test the direct constructors
+ computed = getattr(hashlib, name)(data).hexdigest()
+@@ -75,6 +74,21 @@
+ self.check('md5', 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789',
+ 'd174ab98d277d9f5a5611c2c9f419d9f')
+
++ @precisionbigmemtest(size=_4G + 5, memuse=1)
++ def test_case_md5_huge(self, size):
++ if size == _4G + 5:
++ try:
++ self.check('md5', 'A'*size, 'c9af2dff37468ce5dfee8f2cfc0a9c6d')
++ except OverflowError:
++ pass # 32-bit arch
++
++ @precisionbigmemtest(size=_4G - 1, memuse=1)
++ def test_case_md5_uintmax(self, size):
++ if size == _4G - 1:
++ try:
++ self.check('md5', 'A'*size, '28138d306ff1b8281f1a9067e1a1a2b3')
++ except OverflowError:
++ pass # 32-bit arch
+
+ # use the three examples from Federal Information Processing Standards
+ # Publication 180-1, Secure Hash Standard, 1995 April 17
diff --git a/lang/python30/files/patch-lib-test_test_strop.py b/lang/python30/files/patch-lib-test_test_strop.py
new file mode 100644
index 000000000000..f0e40166957a
--- /dev/null
+++ b/lang/python30/files/patch-lib-test_test_strop.py
@@ -0,0 +1,28 @@
+--- Lib/test/test_strop.py.orig 2002-07-31 00:27:12.000000000 +0100
++++ Lib/test/test_strop.py 2008-08-30 10:16:13.000000000 +0100
+@@ -115,6 +115,25 @@
+ strop.uppercase
+ strop.whitespace
+
++ @test_support.precisionbigmemtest(size=test_support._2G - 1, memuse=5)
++ def test_stropjoin_huge_list(self, size):
++ a = "A" * size
++ try:
++ r = strop.join([a, a], a)
++ except OverflowError:
++ pass
++ else:
++ self.assertEquals(len(r), len(a) * 3)
++
++ @test_support.precisionbigmemtest(size=test_support._2G - 1, memuse=1)
++ def test_stropjoin_huge_tup(self, size):
++ a = "A" * size
++ try:
++ r = strop.join((a, a), a)
++ except OverflowError:
++ pass # acceptable on 32-bit
++ else:
++ self.assertEquals(len(r), len(a) * 3)
+
+ transtable = '\000\001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037 !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`xyzdefghijklmnopqrstuvwxyz{|}~\177\200\201\202\203\204\205\206\207\210\211\212\213\214\215\216\217\220\221\222\223\224\225\226\227\230\231\232\233\234\235\236\237\240\241\242\243\244\245\246\247\250\251\252\253\254\255\256\257\260\261\262\263\264\265\266\267\270\271\272\273\274\275\276\277\300\301\302\303\304\305\306\307\310\311\312\313\314\315\316\317\320\321\322\323\324\325\326\327\330\331\332\333\334\335\336\337\340\341\342\343\344\345\346\347\350\351\352\353\354\355\356\357\360\361\362\363\364\365\366\367\370\371\372\373\374\375\376\377'
+
diff --git a/lang/python30/files/patch-lib-test_test_support.py b/lang/python30/files/patch-lib-test_test_support.py
new file mode 100644
index 000000000000..b11933bc7eed
--- /dev/null
+++ b/lang/python30/files/patch-lib-test_test_support.py
@@ -0,0 +1,62 @@
+--- Lib/test/test_support.py.orig 2008-01-27 01:24:44.000000000 +0000
++++ Lib/test/test_support.py 2008-08-30 10:16:13.000000000 +0100
+@@ -33,6 +33,7 @@
+ use_resources = None # Flag set to [] by regrtest.py
+ max_memuse = 0 # Disable bigmem tests (they will still be run with
+ # small sizes, to make sure they work.)
++real_max_memuse = 0
+
+ # _original_stdout is meant to hold stdout at the time regrtest began.
+ # This may be "the real" stdout, or IDLE's emulation of stdout, or whatever.
+@@ -323,6 +324,7 @@
+ _1M = 1024*1024
+ _1G = 1024 * _1M
+ _2G = 2 * _1G
++_4G = 4 * _1G
+
+ # Hack to get at the maximum value an internal index can take.
+ class _Dummy:
+@@ -333,6 +335,7 @@
+ def set_memlimit(limit):
+ import re
+ global max_memuse
++ global real_max_memuse
+ sizes = {
+ 'k': 1024,
+ 'm': _1M,
+@@ -344,6 +347,7 @@
+ if m is None:
+ raise ValueError('Invalid memory limit %r' % (limit,))
+ memlimit = int(float(m.group(1)) * sizes[m.group(3).lower()])
++ real_max_memuse = memlimit
+ if memlimit > MAX_Py_ssize_t:
+ memlimit = MAX_Py_ssize_t
+ if memlimit < _2G - 1:
+@@ -389,6 +393,27 @@
+ return wrapper
+ return decorator
+
++def precisionbigmemtest(size, memuse, overhead=5*_1M):
++ def decorator(f):
++ def wrapper(self):
++ if not real_max_memuse:
++ maxsize = 5147
++ else:
++ maxsize = size
++
++ if real_max_memuse and real_max_memuse < maxsize * memuse:
++ if verbose:
++ sys.stderr.write("Skipping %s because of memory "
++ "constraint\n" % (f.__name__,))
++ return
++
++ return f(self, maxsize)
++ wrapper.size = size
++ wrapper.memuse = memuse
++ wrapper.overhead = overhead
++ return wrapper
++ return decorator
++
+ def bigaddrspacetest(f):
+ """Decorator for tests that fill the address space."""
+ def wrapper(self):
diff --git a/lang/python30/files/patch-lib_seq_tests.py b/lang/python30/files/patch-lib_seq_tests.py
new file mode 100644
index 000000000000..9be35ae82517
--- /dev/null
+++ b/lang/python30/files/patch-lib_seq_tests.py
@@ -0,0 +1,21 @@
+--- Lib/test/seq_tests.py.orig 2007-11-12 20:04:41.000000000 +0000
++++ Lib/test/seq_tests.py 2008-08-30 10:16:13.000000000 +0100
+@@ -307,11 +307,13 @@
+ self.assertEqual(id(s), id(s*1))
+
+ def test_bigrepeat(self):
+- x = self.type2test([0])
+- x *= 2**16
+- self.assertRaises(MemoryError, x.__mul__, 2**16)
+- if hasattr(x, '__imul__'):
+- self.assertRaises(MemoryError, x.__imul__, 2**16)
++ import sys
++ if sys.maxint <= 2147483647:
++ x = self.type2test([0])
++ x *= 2**16
++ self.assertRaises(MemoryError, x.__mul__, 2**16)
++ if hasattr(x, '__imul__'):
++ self.assertRaises(MemoryError, x.__imul__, 2**16)
+
+ def test_subscript(self):
+ a = self.type2test([10, 11])
diff --git a/lang/python30/files/patch-modules_almodule.c b/lang/python30/files/patch-modules_almodule.c
new file mode 100644
index 000000000000..8e3b9b11380d
--- /dev/null
+++ b/lang/python30/files/patch-modules_almodule.c
@@ -0,0 +1,14 @@
+--- Modules/almodule.c.orig 2006-09-25 07:53:42.000000000 +0100
++++ Modules/almodule.c 2008-08-30 10:39:43.000000000 +0100
+@@ -1633,9 +1633,11 @@
+ if (nvals < 0)
+ goto cleanup;
+ if (nvals > setsize) {
++ ALvalue *old_return_set = return_set;
+ setsize = nvals;
+ PyMem_RESIZE(return_set, ALvalue, setsize);
+ if (return_set == NULL) {
++ return_set = old_return_set;
+ PyErr_NoMemory();
+ goto cleanup;
+ }
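
The almodule.c hunk above (and the matching selectmodule.c hunk below) stashes the old pointer before calling PyMem_RESIZE(), because that macro overwrites its argument with NULL on failure and the original allocation would otherwise be lost in the error path. A minimal standalone sketch of the same pattern, assuming a plain realloc() in place of PyMem_RESIZE(); grow_buffer() and its names are illustrative, not from the patch:

    #include <stdlib.h>

    static int grow_buffer(char **buf, size_t *cap, size_t newcap)
    {
        char *old = *buf;               /* keep the old pointer first */
        char *p = realloc(old, newcap); /* may fail and return NULL */
        if (p == NULL) {
            *buf = old;                 /* restore it instead of leaking it */
            return -1;                  /* caller reports the memory error */
        }
        *buf = p;
        *cap = newcap;
        return 0;
    }
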
diff --git a/lang/python30/files/patch-modules_arraymodule.c b/lang/python30/files/patch-modules_arraymodule.c
new file mode 100644
index 000000000000..738ee48f1485
--- /dev/null
+++ b/lang/python30/files/patch-modules_arraymodule.c
@@ -0,0 +1,33 @@
+--- Modules/arraymodule.c.orig 2008-02-15 19:11:46.000000000 +0000
++++ Modules/arraymodule.c 2008-08-30 10:39:43.000000000 +0100
+@@ -816,6 +816,7 @@
+ array_do_extend(arrayobject *self, PyObject *bb)
+ {
+ Py_ssize_t size;
++ char *old_item;
+
+ if (!array_Check(bb))
+ return array_iter_extend(self, bb);
+@@ -831,10 +832,11 @@
+ return -1;
+ }
+ size = self->ob_size + b->ob_size;
++ old_item = self->ob_item;
+ PyMem_RESIZE(self->ob_item, char, size*self->ob_descr->itemsize);
+ if (self->ob_item == NULL) {
+- PyObject_Del(self);
+- PyErr_NoMemory();
++ self->ob_item = old_item;
++ PyErr_NoMemory();
+ return -1;
+ }
+ memcpy(self->ob_item + self->ob_size*self->ob_descr->itemsize,
+@@ -886,7 +888,7 @@
+ if (size > PY_SSIZE_T_MAX / n) {
+ return PyErr_NoMemory();
+ }
+- PyMem_Resize(items, char, n * size);
++ PyMem_RESIZE(items, char, n * size);
+ if (items == NULL)
+ return PyErr_NoMemory();
+ p = items;
diff --git a/lang/python30/files/patch-modules_gcmodule.c b/lang/python30/files/patch-modules_gcmodule.c
new file mode 100644
index 000000000000..59253930e9c3
--- /dev/null
+++ b/lang/python30/files/patch-modules_gcmodule.c
@@ -0,0 +1,58 @@
+--- Include/pymem.h.orig 2008-02-14 11:26:18.000000000 +0000
++++ Include/pymem.h 2008-08-30 10:39:43.000000000 +0100
+@@ -67,8 +67,12 @@
+ for malloc(0), which would be treated as an error. Some platforms
+ would return a pointer with no memory behind it, which would break
+ pymalloc. To solve these problems, allocate an extra byte. */
+-#define PyMem_MALLOC(n) malloc((n) ? (n) : 1)
+-#define PyMem_REALLOC(p, n) realloc((p), (n) ? (n) : 1)
++/* Returns NULL to indicate error if a negative size or size larger than
++ Py_ssize_t can represent is supplied. Helps prevents security holes. */
++#define PyMem_MALLOC(n) (((n) < 0 || (n) > PY_SSIZE_T_MAX) ? NULL \
++ : malloc((n) ? (n) : 1))
++#define PyMem_REALLOC(p, n) (((n) < 0 || (n) > PY_SSIZE_T_MAX) ? NULL \
++ : realloc((p), (n) ? (n) : 1))
+ #define PyMem_FREE free
+
+ #endif /* PYMALLOC_DEBUG */
+@@ -77,24 +81,31 @@
+ * Type-oriented memory interface
+ * ==============================
+ *
+- * These are carried along for historical reasons. There's rarely a good
+- * reason to use them anymore (you can just as easily do the multiply and
+- * cast yourself).
++ * Allocate memory for n objects of the given type. Returns a new pointer
++ * or NULL if the request was too large or memory allocation failed. Use
++ * these macros rather than doing the multiplication yourself so that proper
++ * overflow checking is always done.
+ */
+
+ #define PyMem_New(type, n) \
+- ( assert((n) <= PY_SIZE_MAX / sizeof(type)) , \
++ ( ((n) > PY_SSIZE_T_MAX / sizeof(type)) ? NULL : \
+ ( (type *) PyMem_Malloc((n) * sizeof(type)) ) )
+ #define PyMem_NEW(type, n) \
+- ( assert((n) <= PY_SIZE_MAX / sizeof(type)) , \
++ ( ((n) > PY_SSIZE_T_MAX / sizeof(type)) ? NULL : \
+ ( (type *) PyMem_MALLOC((n) * sizeof(type)) ) )
+
++/*
++ * The value of (p) is always clobbered by this macro regardless of success.
++ * The caller MUST check if (p) is NULL afterwards and deal with the memory
++ * error if so. This means the original value of (p) MUST be saved for the
++ * caller's memory error handler to not lose track of it.
++ */
+ #define PyMem_Resize(p, type, n) \
+- ( assert((n) <= PY_SIZE_MAX / sizeof(type)) , \
+- ( (p) = (type *) PyMem_Realloc((p), (n) * sizeof(type)) ) )
++ ( (p) = ((n) > PY_SSIZE_T_MAX / sizeof(type)) ? NULL : \
++ (type *) PyMem_Realloc((p), (n) * sizeof(type)) )
+ #define PyMem_RESIZE(p, type, n) \
+- ( assert((n) <= PY_SIZE_MAX / sizeof(type)) , \
+- ( (p) = (type *) PyMem_REALLOC((p), (n) * sizeof(type)) ) )
++ ( (p) = ((n) > PY_SSIZE_T_MAX / sizeof(type)) ? NULL : \
++ (type *) PyMem_REALLOC((p), (n) * sizeof(type)) )
+
+ /* PyMem{Del,DEL} are left over from ancient days, and shouldn't be used
+ * anymore. They're just confusing aliases for PyMem_{Free,FREE} now.
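
The pymem.h hunk above turns the typed allocation macros from assert-only checks into run-time overflow rejection: a request is refused before the multiplication by sizeof(type) can wrap. A reduced illustration under the assumption that PTRDIFF_MAX stands in for PY_SSIZE_T_MAX; ALLOC_NEW is a hypothetical name, not the real macro:

    #include <stdint.h>
    #include <stdlib.h>

    /* Reject requests whose byte count would exceed the signed-size limit.
     * A negative n converts to a huge size_t and is rejected the same way. */
    #define ALLOC_NEW(type, n) \
        (((size_t)(n) > (size_t)PTRDIFF_MAX / sizeof(type)) ? NULL \
            : (type *)malloc((size_t)(n) * sizeof(type)))

    /* usage: fds is NULL on overflow or out-of-memory, never a short buffer
     * struct pollfd *fds = ALLOC_NEW(struct pollfd, nfds); */
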
diff --git a/lang/python30/files/patch-modules_hashopenssl.c b/lang/python30/files/patch-modules_hashopenssl.c
new file mode 100644
index 000000000000..1f0cd56a244a
--- /dev/null
+++ b/lang/python30/files/patch-modules_hashopenssl.c
@@ -0,0 +1,104 @@
+--- Modules/_hashopenssl.c.orig 2006-05-29 22:04:52.000000000 +0100
++++ Modules/_hashopenssl.c 2008-08-30 10:43:27.000000000 +0100
+@@ -19,6 +19,8 @@
+ /* EVP is the preferred interface to hashing in OpenSSL */
+ #include <openssl/evp.h>
+
++#define MUNCH_SIZE INT_MAX
++
+
+ #ifndef HASH_OBJ_CONSTRUCTOR
+ #define HASH_OBJ_CONSTRUCTOR 0
+@@ -164,9 +166,18 @@
+ if (!PyArg_ParseTuple(args, "s#:update", &cp, &len))
+ return NULL;
+
++ if (len > 0 && len <= MUNCH_SIZE) {
+ EVP_DigestUpdate(&self->ctx, cp, Py_SAFE_DOWNCAST(len, Py_ssize_t,
+ unsigned int));
+-
++ } else {
++ Py_ssize_t offset = 0;
++ while (len) {
++ unsigned int process = len > MUNCH_SIZE ? MUNCH_SIZE : len;
++ EVP_DigestUpdate(&self->ctx, cp + offset, process);
++ len -= process;
++ offset += process;
++ }
++ }
+ Py_INCREF(Py_None);
+ return Py_None;
+ }
+@@ -255,9 +266,20 @@
+ self->name = name_obj;
+ Py_INCREF(self->name);
+
+- if (cp && len)
++ if (cp && len) {
++ if (len > 0 && len <= MUNCH_SIZE) {
+ EVP_DigestUpdate(&self->ctx, cp, Py_SAFE_DOWNCAST(len, Py_ssize_t,
+ unsigned int));
++ } else {
++ Py_ssize_t offset = 0;
++ while (len) {
++ unsigned int process = len > MUNCH_SIZE ? MUNCH_SIZE : len;
++ EVP_DigestUpdate(&self->ctx, cp + offset, process);
++ len -= process;
++ offset += process;
++ }
++ }
++ }
+
+ return 0;
+ }
+@@ -328,7 +350,7 @@
+ static PyObject *
+ EVPnew(PyObject *name_obj,
+ const EVP_MD *digest, const EVP_MD_CTX *initial_ctx,
+- const unsigned char *cp, unsigned int len)
++ const unsigned char *cp, Py_ssize_t len)
+ {
+ EVPobject *self;
+
+@@ -346,8 +368,20 @@
+ EVP_DigestInit(&self->ctx, digest);
+ }
+
+- if (cp && len)
+- EVP_DigestUpdate(&self->ctx, cp, len);
++ if (cp && len) {
++ if (len > 0 && len <= MUNCH_SIZE) {
++ EVP_DigestUpdate(&self->ctx, cp, Py_SAFE_DOWNCAST(len, Py_ssize_t,
++ unsigned int));
++ } else {
++ Py_ssize_t offset = 0;
++ while (len) {
++ unsigned int process = len > MUNCH_SIZE ? MUNCH_SIZE : len;
++ EVP_DigestUpdate(&self->ctx, cp + offset, process);
++ len -= process;
++ offset += process;
++ }
++ }
++ }
+
+ return (PyObject *)self;
+ }
+@@ -384,8 +418,7 @@
+
+ digest = EVP_get_digestbyname(name);
+
+- return EVPnew(name_obj, digest, NULL, cp, Py_SAFE_DOWNCAST(len, Py_ssize_t,
+- unsigned int));
++ return EVPnew(name_obj, digest, NULL, cp, len);
+ }
+
+ /*
+@@ -410,7 +443,7 @@
+ CONST_ ## NAME ## _name_obj, \
+ NULL, \
+ CONST_new_ ## NAME ## _ctx_p, \
+- cp, Py_SAFE_DOWNCAST(len, Py_ssize_t, unsigned int)); \
++ cp, len); \
+ }
+
+ /* a PyMethodDef structure for the constructor */
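
The _hashopenssl.c hunks above widen the length parameter to Py_ssize_t and, when the input is larger than the unsigned int the old code downcast to, feed the digest in MUNCH_SIZE-sized pieces instead of silently truncating. A self-contained sketch of the chunking loop; hash_update() is a stand-in for EVP_DigestUpdate(), not the real binding:

    #include <limits.h>
    #include <stddef.h>

    #define MUNCH_SIZE INT_MAX

    /* Stand-in for the digest update call whose length is an unsigned int;
     * here it only tallies the bytes it was fed. */
    static size_t fed;
    static void hash_update(const unsigned char *p, unsigned int n)
    {
        (void)p;
        fed += n;
    }

    static void update_all(const unsigned char *cp, size_t len)
    {
        size_t offset = 0;
        while (len) {
            unsigned int process = len > (size_t)MUNCH_SIZE
                                       ? (unsigned int)MUNCH_SIZE
                                       : (unsigned int)len;
            hash_update(cp + offset, process);  /* never more than INT_MAX */
            len -= process;
            offset += process;
        }
    }
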
diff --git a/lang/python30/files/patch-modules_mmapmodule.c b/lang/python30/files/patch-modules_mmapmodule.c
new file mode 100644
index 000000000000..60f3d71ff349
--- /dev/null
+++ b/lang/python30/files/patch-modules_mmapmodule.c
@@ -0,0 +1,11 @@
+--- Modules/mmapmodule.c.orig 2006-08-22 14:57:07.000000000 +0100
++++ Modules/mmapmodule.c 2008-08-30 10:16:13.000000000 +0100
+@@ -223,7 +223,7 @@
+ return(NULL);
+
+ /* silently 'adjust' out-of-range requests */
+- if ((self->pos + num_bytes) > self->size) {
++ if (num_bytes > self->size - self->pos) {
+ num_bytes -= (self->pos+num_bytes) - self->size;
+ }
+ result = Py_BuildValue("s#", self->data+self->pos, num_bytes);
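
The mmapmodule.c hunk replaces a bounds test that adds first and compares second, which can wrap, with one that subtracts inside the known range and therefore cannot. The same idea in isolation, assuming the caller guarantees pos <= size as mmap_read does:

    #include <stddef.h>

    static size_t clamp_read(size_t pos, size_t size, size_t num_bytes)
    {
        /* size - pos cannot underflow because pos <= size */
        if (num_bytes > size - pos)
            num_bytes = size - pos;   /* silently 'adjust' the request */
        return num_bytes;
    }
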
diff --git a/lang/python30/files/patch-modules_selectmodule.c b/lang/python30/files/patch-modules_selectmodule.c
new file mode 100644
index 000000000000..446241f05a97
--- /dev/null
+++ b/lang/python30/files/patch-modules_selectmodule.c
@@ -0,0 +1,16 @@
+--- Modules/selectmodule.c.orig 2006-07-10 02:18:57.000000000 +0100
++++ Modules/selectmodule.c 2008-08-30 10:39:43.000000000 +0100
+@@ -349,10 +349,12 @@
+ {
+ Py_ssize_t i, pos;
+ PyObject *key, *value;
++ struct pollfd *old_ufds = self->ufds;
+
+ self->ufd_len = PyDict_Size(self->dict);
+- PyMem_Resize(self->ufds, struct pollfd, self->ufd_len);
++ PyMem_RESIZE(self->ufds, struct pollfd, self->ufd_len);
+ if (self->ufds == NULL) {
++ self->ufds = old_ufds;
+ PyErr_NoMemory();
+ return 0;
+ }
diff --git a/lang/python30/files/patch-modules_stropmodule.c b/lang/python30/files/patch-modules_stropmodule.c
new file mode 100644
index 000000000000..d7f42ce18871
--- /dev/null
+++ b/lang/python30/files/patch-modules_stropmodule.c
@@ -0,0 +1,31 @@
+--- Modules/stropmodule.c.orig 2008-02-14 11:26:18.000000000 +0000
++++ Modules/stropmodule.c 2008-08-30 10:16:13.000000000 +0100
+@@ -216,6 +216,13 @@
+ return NULL;
+ }
+ slen = PyString_GET_SIZE(item);
++ if (slen > PY_SSIZE_T_MAX - reslen ||
++ seplen > PY_SSIZE_T_MAX - reslen - seplen) {
++ PyErr_SetString(PyExc_OverflowError,
++ "input too long");
++ Py_DECREF(res);
++ return NULL;
++ }
+ while (reslen + slen + seplen >= sz) {
+ if (_PyString_Resize(&res, sz * 2) < 0)
+ return NULL;
+@@ -253,6 +260,14 @@
+ return NULL;
+ }
+ slen = PyString_GET_SIZE(item);
++ if (slen > PY_SSIZE_T_MAX - reslen ||
++ seplen > PY_SSIZE_T_MAX - reslen - seplen) {
++ PyErr_SetString(PyExc_OverflowError,
++ "input too long");
++ Py_DECREF(res);
++ Py_XDECREF(item);
++ return NULL;
++ }
+ while (reslen + slen + seplen >= sz) {
+ if (_PyString_Resize(&res, sz * 2) < 0) {
+ Py_DECREF(item);
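
The stropmodule.c hunks bound the running result length before each piece and separator are added to it, raising OverflowError instead of letting reslen wrap. A reduced version of that guard, with ptrdiff_t/PTRDIFF_MAX standing in for Py_ssize_t/PY_SSIZE_T_MAX and the inputs assumed non-negative:

    #include <stdint.h>
    #include <stddef.h>

    static int can_append(ptrdiff_t reslen, ptrdiff_t slen, ptrdiff_t seplen)
    {
        if (slen > PTRDIFF_MAX - reslen ||
            seplen > PTRDIFF_MAX - reslen - slen)
            return 0;   /* would overflow: report "input too long" */
        return 1;
    }
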
diff --git a/lang/python30/files/patch-objects_bufferobject.c b/lang/python30/files/patch-objects_bufferobject.c
new file mode 100644
index 000000000000..16e99568ad9a
--- /dev/null
+++ b/lang/python30/files/patch-objects_bufferobject.c
@@ -0,0 +1,13 @@
+--- Objects/bufferobject.c.orig 2008-02-14 11:26:18.000000000 +0000
++++ Objects/bufferobject.c 2008-08-30 10:16:13.000000000 +0100
+@@ -427,6 +427,10 @@
+ count = 0;
+ if (!get_buf(self, &ptr, &size, ANY_BUFFER))
+ return NULL;
++ if (count > PY_SSIZE_T_MAX / size) {
++ PyErr_SetString(PyExc_MemoryError, "result too large");
++ return NULL;
++ }
+ ob = PyString_FromStringAndSize(NULL, size * count);
+ if ( ob == NULL )
+ return NULL;
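
The bufferobject.c hunk rejects a repeat count whose product with the buffer size cannot be represented, before that product reaches the allocator. The pre-multiplication test on its own, assuming size > 0 and count >= 0 as in buffer_repeat():

    #include <stdint.h>
    #include <stddef.h>

    static int repeat_fits(ptrdiff_t size, ptrdiff_t count)
    {
        return count <= PTRDIFF_MAX / size;   /* size * count stays in range */
    }
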
diff --git a/lang/python30/files/patch-objects_longobject.c b/lang/python30/files/patch-objects_longobject.c
new file mode 100644
index 000000000000..1221db9fed0a
--- /dev/null
+++ b/lang/python30/files/patch-objects_longobject.c
@@ -0,0 +1,11 @@
+--- Objects/longobject.c.orig 2007-05-07 19:30:48.000000000 +0100
++++ Objects/longobject.c 2008-08-30 10:16:13.000000000 +0100
+@@ -70,6 +70,8 @@
+ PyErr_NoMemory();
+ return NULL;
+ }
++ /* XXX(nnorwitz): This can overflow --
++ PyObject_NEW_VAR / _PyObject_VAR_SIZE need to detect overflow */
+ return PyObject_NEW_VAR(PyLongObject, &PyLong_Type, size);
+ }
+
diff --git a/lang/python30/files/patch-objects_obmalloc.c b/lang/python30/files/patch-objects_obmalloc.c
new file mode 100644
index 000000000000..27050596fbfc
--- /dev/null
+++ b/lang/python30/files/patch-objects_obmalloc.c
@@ -0,0 +1,34 @@
+--- Objects/obmalloc.c.orig 2008-02-14 11:26:18.000000000 +0000
++++ Objects/obmalloc.c 2008-08-30 10:39:43.000000000 +0100
+@@ -727,6 +727,15 @@
+ uint size;
+
+ /*
++ * Limit ourselves to PY_SSIZE_T_MAX bytes to prevent security holes.
++ * Most python internals blindly use a signed Py_ssize_t to track
++ * things without checking for overflows or negatives.
++ * As size_t is unsigned, checking for nbytes < 0 is not required.
++ */
++ if (nbytes > PY_SSIZE_T_MAX)
++ return NULL;
++
++ /*
+ * This implicitly redirects malloc(0).
+ */
+ if ((nbytes - 1) < SMALL_REQUEST_THRESHOLD) {
+@@ -1130,6 +1139,15 @@
+ if (p == NULL)
+ return PyObject_Malloc(nbytes);
+
++ /*
++ * Limit ourselves to PY_SSIZE_T_MAX bytes to prevent security holes.
++ * Most python internals blindly use a signed Py_ssize_t to track
++ * things without checking for overflows or negatives.
++ * As size_t is unsigned, checking for nbytes < 0 is not required.
++ */
++ if (nbytes > PY_SSIZE_T_MAX)
++ return NULL;
++
+ pool = POOL_ADDR(p);
+ if (Py_ADDRESS_IN_RANGE(p, pool)) {
+ /* We're in charge of this block */
diff --git a/lang/python30/files/patch-objects_stringobject.c b/lang/python30/files/patch-objects_stringobject.c
new file mode 100644
index 000000000000..af55c78a4928
--- /dev/null
+++ b/lang/python30/files/patch-objects_stringobject.c
@@ -0,0 +1,49 @@
+--- Objects/stringobject.c.orig 2007-11-07 01:19:49.000000000 +0000
++++ Objects/stringobject.c 2008-08-30 10:16:13.000000000 +0100
+@@ -71,6 +71,11 @@
+ return (PyObject *)op;
+ }
+
++ if (size > PY_SSIZE_T_MAX - sizeof(PyStringObject)) {
++ PyErr_SetString(PyExc_OverflowError, "string is too large");
++ return NULL;
++ }
++
+ /* Inline PyObject_NewVar */
+ op = (PyStringObject *)PyObject_MALLOC(sizeof(PyStringObject) + size);
+ if (op == NULL)
+@@ -106,7 +111,7 @@
+
+ assert(str != NULL);
+ size = strlen(str);
+- if (size > PY_SSIZE_T_MAX) {
++ if (size > PY_SSIZE_T_MAX - sizeof(PyStringObject)) {
+ PyErr_SetString(PyExc_OverflowError,
+ "string is too long for a Python string");
+ return NULL;
+@@ -967,14 +972,24 @@
+ Py_INCREF(a);
+ return (PyObject *)a;
+ }
++ /* Check that string sizes are not negative, to prevent an
++ overflow in cases where we are passed incorrectly-created
++ strings with negative lengths (due to a bug in other code).
++ */
+ size = a->ob_size + b->ob_size;
+- if (size < 0) {
++ if (a->ob_size < 0 || b->ob_size < 0 ||
++ a->ob_size > PY_SSIZE_T_MAX - b->ob_size) {
+ PyErr_SetString(PyExc_OverflowError,
+ "strings are too large to concat");
+ return NULL;
+ }
+
+ /* Inline PyObject_NewVar */
++ if (size > PY_SSIZE_T_MAX - sizeof(PyStringObject)) {
++ PyErr_SetString(PyExc_OverflowError,
++ "strings are too large to concat");
++ return NULL;
++ }
+ op = (PyStringObject *)PyObject_MALLOC(sizeof(PyStringObject) + size);
+ if (op == NULL)
+ return PyErr_NoMemory();
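
The stringobject.c hunks account for the object header before the payload length is added to it, so sizeof(PyStringObject) + size cannot wrap, and they refuse concatenation when the two operand lengths alone would overflow. A generic form of the header-plus-payload check; strobj is a hypothetical header type, not the real PyStringObject:

    #include <stdint.h>
    #include <stddef.h>

    struct strobj { ptrdiff_t len; char data[1]; };   /* illustrative header */

    static int alloc_size_ok(size_t payload)
    {
        return payload <= (size_t)PTRDIFF_MAX - sizeof(struct strobj);
    }
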
diff --git a/lang/python30/files/patch-objects_tupleobject.c b/lang/python30/files/patch-objects_tupleobject.c
new file mode 100644
index 000000000000..eb133b6e002c
--- /dev/null
+++ b/lang/python30/files/patch-objects_tupleobject.c
@@ -0,0 +1,17 @@
+--- Objects/tupleobject.c.orig 2006-08-12 18:03:09.000000000 +0100
++++ Objects/tupleobject.c 2008-08-30 10:16:13.000000000 +0100
+@@ -60,11 +60,12 @@
+ Py_ssize_t nbytes = size * sizeof(PyObject *);
+ /* Check for overflow */
+ if (nbytes / sizeof(PyObject *) != (size_t)size ||
+- (nbytes += sizeof(PyTupleObject) - sizeof(PyObject *))
+- <= 0)
++ (nbytes > PY_SSIZE_T_MAX - sizeof(PyTupleObject) - sizeof(PyObject *)))
+ {
+ return PyErr_NoMemory();
+ }
++ nbytes += sizeof(PyTupleObject) - sizeof(PyObject *);
++
+ op = PyObject_GC_NewVar(PyTupleObject, &PyTuple_Type, size);
+ if (op == NULL)
+ return NULL;
diff --git a/lang/python30/files/patch-objects_unicodeobject.c b/lang/python30/files/patch-objects_unicodeobject.c
new file mode 100644
index 000000000000..85e88caae0e3
--- /dev/null
+++ b/lang/python30/files/patch-objects_unicodeobject.c
@@ -0,0 +1,115 @@
+--- Objects/unicodeobject.c.orig 2007-11-02 22:46:38.000000000 +0000
++++ Objects/unicodeobject.c 2008-08-30 10:16:13.000000000 +0100
+@@ -239,6 +239,11 @@
+ return unicode_empty;
+ }
+
++ /* Ensure we won't overflow the size. */
++ if (length > ((PY_SSIZE_T_MAX / sizeof(Py_UNICODE)) - 1)) {
++ return (PyUnicodeObject *)PyErr_NoMemory();
++ }
++
+ /* Unicode freelist & memory allocation */
+ if (unicode_freelist) {
+ unicode = unicode_freelist;
+@@ -1091,6 +1096,9 @@
+ char * out;
+ char * start;
+
++ if (cbAllocated / 5 != size)
++ return PyErr_NoMemory();
++
+ if (size == 0)
+ return PyString_FromStringAndSize(NULL, 0);
+
+@@ -1689,8 +1697,9 @@
+ {
+ PyObject *v;
+ unsigned char *p;
++ Py_ssize_t nsize, bytesize;
+ #ifdef Py_UNICODE_WIDE
+- int i, pairs;
++ Py_ssize_t i, pairs;
+ #else
+ const int pairs = 0;
+ #endif
+@@ -1713,8 +1722,15 @@
+ if (s[i] >= 0x10000)
+ pairs++;
+ #endif
+- v = PyString_FromStringAndSize(NULL,
+- 2 * (size + pairs + (byteorder == 0)));
++ /* 2 * (size + pairs + (byteorder == 0)) */
++ if (size > PY_SSIZE_T_MAX ||
++ size > PY_SSIZE_T_MAX - pairs - (byteorder == 0))
++ return PyErr_NoMemory();
++ nsize = (size + pairs + (byteorder == 0));
++ bytesize = nsize * 2;
++ if (bytesize / 2 != nsize)
++ return PyErr_NoMemory();
++ v = PyString_FromStringAndSize(NULL, bytesize);
+ if (v == NULL)
+ return NULL;
+
+@@ -2042,6 +2058,11 @@
+ char *p;
+
+ static const char *hexdigit = "0123456789abcdef";
++#ifdef Py_UNICODE_WIDE
++ const Py_ssize_t expandsize = 10;
++#else
++ const Py_ssize_t expandsize = 6;
++#endif
+
+ /* Initial allocation is based on the longest-possible unichr
+ escape.
+@@ -2057,13 +2078,12 @@
+ escape.
+ */
+
++ if (size > (PY_SSIZE_T_MAX - 2 - 1) / expandsize)
++ return PyErr_NoMemory();
++
+ repr = PyString_FromStringAndSize(NULL,
+ 2
+-#ifdef Py_UNICODE_WIDE
+- + 10*size
+-#else
+- + 6*size
+-#endif
++ + expandsize*size
+ + 1);
+ if (repr == NULL)
+ return NULL;
+@@ -2304,12 +2324,16 @@
+ char *q;
+
+ static const char *hexdigit = "0123456789abcdef";
+-
+ #ifdef Py_UNICODE_WIDE
+- repr = PyString_FromStringAndSize(NULL, 10 * size);
++ const Py_ssize_t expandsize = 10;
+ #else
+- repr = PyString_FromStringAndSize(NULL, 6 * size);
++ const Py_ssize_t expandsize = 6;
+ #endif
++
++ if (size > PY_SSIZE_T_MAX / expandsize)
++ return PyErr_NoMemory();
++
++ repr = PyString_FromStringAndSize(NULL, expandsize * size);
+ if (repr == NULL)
+ return NULL;
+ if (size == 0)
+@@ -4719,6 +4743,11 @@
+ return self;
+ }
+
++ if (left > PY_SSIZE_T_MAX - self->length ||
++ right > PY_SSIZE_T_MAX - (left + self->length)) {
++ PyErr_SetString(PyExc_OverflowError, "padded string is too long");
++ return NULL;
++ }
+ u = _PyUnicode_New(left + self->length + right);
+ if (u) {
+ if (left)
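
The unicodeobject.c hunks above hoist the per-character expansion factor (10 bytes on wide builds, 6 otherwise) into expandsize and bound the input length by division, so the scaled allocation request itself can never overflow. The check in isolation, assuming expandsize > 0 and the same 2-byte prefix and trailing byte the patch reserves:

    #include <stdint.h>
    #include <stddef.h>

    static int repr_fits(ptrdiff_t size, ptrdiff_t expandsize)
    {
        return size <= (PTRDIFF_MAX - 2 - 1) / expandsize;
    }
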
diff --git a/lang/python30/files/patch-python_mysnprintf.c b/lang/python30/files/patch-python_mysnprintf.c
new file mode 100644
index 000000000000..276dd21a1b31
--- /dev/null
+++ b/lang/python30/files/patch-python_mysnprintf.c
@@ -0,0 +1,55 @@
+--- Python/mysnprintf.c.orig 2001-12-21 16:32:15.000000000 +0000
++++ Python/mysnprintf.c 2008-08-30 10:46:31.000000000 +0100
+@@ -54,18 +54,28 @@
+ PyOS_vsnprintf(char *str, size_t size, const char *format, va_list va)
+ {
+ int len; /* # bytes written, excluding \0 */
+-#ifndef HAVE_SNPRINTF
++#ifdef HAVE_SNPRINTF
++#define _PyOS_vsnprintf_EXTRA_SPACE 1
++#else
++#define _PyOS_vsnprintf_EXTRA_SPACE 512
+ char *buffer;
+ #endif
+ assert(str != NULL);
+ assert(size > 0);
+ assert(format != NULL);
++ /* We take a size_t as input but return an int. Sanity check
++ * our input so that it won't cause an overflow in the
++ * vsnprintf return value or the buffer malloc size. */
++ if (size > INT_MAX - _PyOS_vsnprintf_EXTRA_SPACE) {
++ len = -666;
++ goto Done;
++ }
+
+ #ifdef HAVE_SNPRINTF
+ len = vsnprintf(str, size, format, va);
+ #else
+ /* Emulate it. */
+- buffer = PyMem_MALLOC(size + 512);
++ buffer = PyMem_MALLOC(size + _PyOS_vsnprintf_EXTRA_SPACE);
+ if (buffer == NULL) {
+ len = -666;
+ goto Done;
+@@ -75,7 +85,7 @@
+ if (len < 0)
+ /* ignore the error */;
+
+- else if ((size_t)len >= size + 512)
++ else if ((size_t)len >= size + _PyOS_vsnprintf_EXTRA_SPACE)
+ Py_FatalError("Buffer overflow in PyOS_snprintf/PyOS_vsnprintf");
+
+ else {
+@@ -86,8 +96,10 @@
+ str[to_copy] = '\0';
+ }
+ PyMem_FREE(buffer);
+-Done:
+ #endif
+- str[size-1] = '\0';
++Done:
++ if (size > 0)
++ str[size-1] = '\0';
+ return len;
++#undef _PyOS_vsnprintf_EXTRA_SPACE
+ }
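
The mysnprintf.c hunk caps the size_t input before it can overflow either the int return value or the emulation buffer, and only NUL-terminates when there is room to do so. A stripped-down sketch of the same guard on a platform that does have vsnprintf(); the -666 sentinel mirrors the patch, and bounded_vsnprintf is an illustrative name:

    #include <limits.h>
    #include <stdarg.h>
    #include <stdio.h>

    #define EXTRA_SPACE 512   /* slack reserved by the emulation path */

    static int bounded_vsnprintf(char *str, size_t size,
                                 const char *fmt, va_list va)
    {
        int len;
        if (size > (size_t)(INT_MAX - EXTRA_SPACE))
            len = -666;                      /* refuse instead of overflowing */
        else
            len = vsnprintf(str, size, fmt, va);
        if (size > 0)
            str[size - 1] = '\0';            /* terminate only if room exists */
        return len;
    }
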
diff --git a/lang/python31/Makefile b/lang/python31/Makefile
index 6224b163004d..fa6cd7bd7061 100644
--- a/lang/python31/Makefile
+++ b/lang/python31/Makefile
@@ -6,7 +6,7 @@
PORTNAME= python25
PORTVERSION= 2.5.2
-PORTREVISION= 2
+PORTREVISION= 3
CATEGORIES= lang python ipv6
MASTER_SITES= ${PYTHON_MASTER_SITES}
MASTER_SITE_SUBDIR= ${PYTHON_MASTER_SITE_SUBDIR}
diff --git a/lang/python31/files/patch-lib-test_test_bigmem.py b/lang/python31/files/patch-lib-test_test_bigmem.py
new file mode 100644
index 000000000000..5ec5935a046f
--- /dev/null
+++ b/lang/python31/files/patch-lib-test_test_bigmem.py
@@ -0,0 +1,163 @@
+--- Lib/test/test_bigmem.py.orig 2007-11-30 21:53:17.000000000 +0000
++++ Lib/test/test_bigmem.py 2008-08-30 10:16:13.000000000 +0100
+@@ -1,5 +1,5 @@
+ from test import test_support
+-from test.test_support import bigmemtest, _1G, _2G
++from test.test_support import bigmemtest, _1G, _2G, _4G, precisionbigmemtest
+
+ import unittest
+ import operator
+@@ -54,6 +54,22 @@
+ self.assertEquals(s[lpadsize:-rpadsize], SUBSTR)
+ self.assertEquals(s.strip(), SUBSTR.strip())
+
++ @precisionbigmemtest(size=_2G - 1, memuse=1)
++ def test_center_unicode(self, size):
++ SUBSTR = u' abc def ghi'
++ try:
++ s = SUBSTR.center(size)
++ except OverflowError:
++ pass # acceptable on 32-bit
++ else:
++ self.assertEquals(len(s), size)
++ lpadsize = rpadsize = (len(s) - len(SUBSTR)) // 2
++ if len(s) % 2:
++ lpadsize += 1
++ self.assertEquals(s[lpadsize:-rpadsize], SUBSTR)
++ self.assertEquals(s.strip(), SUBSTR.strip())
++ del s
++
+ @bigmemtest(minsize=_2G, memuse=2)
+ def test_count(self, size):
+ SUBSTR = ' abc def ghi'
+@@ -70,10 +86,44 @@
+ s = '.' * size
+ self.assertEquals(len(s.decode('utf-8')), size)
+
++ def basic_encode_test(self, size, enc, c=u'.', expectedsize=None):
++ if expectedsize is None:
++ expectedsize = size
++
++ s = c * size
++ self.assertEquals(len(s.encode(enc)), expectedsize)
++
+ @bigmemtest(minsize=_2G + 2, memuse=3)
+ def test_encode(self, size):
+- s = u'.' * size
+- self.assertEquals(len(s.encode('utf-8')), size)
++ return self.basic_encode_test(size, 'utf-8')
++
++ @precisionbigmemtest(size=_4G / 6 + 2, memuse=2)
++ def test_encode_raw_unicode_escape(self, size):
++ try:
++ return self.basic_encode_test(size, 'raw_unicode_escape')
++ except MemoryError:
++ pass # acceptable on 32-bit
++
++ @precisionbigmemtest(size=_4G / 5 + 70, memuse=3)
++ def test_encode_utf7(self, size):
++ try:
++ return self.basic_encode_test(size, 'utf7')
++ except MemoryError:
++ pass # acceptable on 32-bit
++
++ @precisionbigmemtest(size=_2G-1, memuse=2)
++ def test_decodeascii(self, size):
++ return self.basic_encode_test(size, 'ascii', c='A')
++
++ @precisionbigmemtest(size=_4G / 5, memuse=6+2)
++ def test_unicode_repr_oflw(self, size):
++ try:
++ s = u"\uAAAA"*size
++ r = repr(s)
++ except MemoryError:
++ pass # acceptable on 32-bit
++ else:
++ self.failUnless(s == eval(r))
+
+ @bigmemtest(minsize=_2G, memuse=2)
+ def test_endswith(self, size):
+@@ -459,6 +509,11 @@
+ self.assertEquals(s.count('\\'), size)
+ self.assertEquals(s.count('0'), size * 2)
+
++ @bigmemtest(minsize=2**32 / 5, memuse=6+2)
++ def test_unicode_repr(self, size):
++ s = u"\uAAAA" * size
++ self.failUnless(len(repr(s)) > size)
++
+ # This test is meaningful even with size < 2G, as long as the
+ # doubled string is > 2G (but it tests more if both are > 2G :)
+ @bigmemtest(minsize=_1G + 2, memuse=3)
+@@ -642,6 +697,35 @@
+ def test_repeat_large(self, size):
+ return self.basic_test_repeat(size)
+
++ @bigmemtest(minsize=_1G - 1, memuse=12)
++ def test_repeat_large_2(self, size):
++ return self.basic_test_repeat(size)
++
++ @precisionbigmemtest(size=_1G - 1, memuse=9)
++ def test_from_2G_generator(self, size):
++ try:
++ t = tuple(xrange(size))
++ except MemoryError:
++ pass # acceptable on 32-bit
++ else:
++ count = 0
++ for item in t:
++ self.assertEquals(item, count)
++ count += 1
++ self.assertEquals(count, size)
++
++ @precisionbigmemtest(size=_1G - 25, memuse=9)
++ def test_from_almost_2G_generator(self, size):
++ try:
++ t = tuple(xrange(size))
++ count = 0
++ for item in t:
++ self.assertEquals(item, count)
++ count += 1
++ self.assertEquals(count, size)
++ except MemoryError:
++ pass # acceptable, expected on 32-bit
++
+ # Like test_concat, split in two.
+ def basic_test_repr(self, size):
+ t = (0,) * size
+@@ -957,8 +1041,34 @@
+ self.assertEquals(l[:10], [1] * 10)
+ self.assertEquals(l[-10:], [5] * 10)
+
++class BufferTest(unittest.TestCase):
++
++ @precisionbigmemtest(size=_1G, memuse=4)
++ def test_repeat(self, size):
++ try:
++ b = buffer("AAAA")*size
++ except MemoryError:
++ pass # acceptable on 32-bit
++ else:
++ count = 0
++ for c in b:
++ self.assertEquals(c, 'A')
++ count += 1
++ self.assertEquals(count, size*4)
++
+ def test_main():
+- test_support.run_unittest(StrTest, TupleTest, ListTest)
++ test_support.run_unittest(StrTest, TupleTest, ListTest, BufferTest)
++
++# Expected failures (crashers)
++# del StrTest.test_center_unicode
++del StrTest.test_decodeascii
++# del StrTest.test_encode_utf32
++# del StrTest.test_encode_utf7
++# del StrTest.test_encode_raw_unicode_escape
++#
++# del TupleTest.test_from_2G_generator
++#
++# del BufferTest.test_repeat
+
+ if __name__ == '__main__':
+ if len(sys.argv) > 1:
diff --git a/lang/python31/files/patch-lib-test_test_hashlib.py b/lang/python31/files/patch-lib-test_test_hashlib.py
new file mode 100644
index 000000000000..15ede2b17e8c
--- /dev/null
+++ b/lang/python31/files/patch-lib-test_test_hashlib.py
@@ -0,0 +1,41 @@
+--- Lib/test/test_hashlib.py.orig 2005-08-21 19:45:59.000000000 +0100
++++ Lib/test/test_hashlib.py 2008-08-30 10:43:27.000000000 +0100
+@@ -9,7 +9,7 @@
+ import hashlib
+ import unittest
+ from test import test_support
+-
++from test.test_support import _4G, precisionbigmemtest
+
+ def hexstr(s):
+ import string
+@@ -55,7 +55,6 @@
+ m2.update(aas + bees + cees)
+ self.assertEqual(m1.digest(), m2.digest())
+
+-
+ def check(self, name, data, digest):
+ # test the direct constructors
+ computed = getattr(hashlib, name)(data).hexdigest()
+@@ -75,6 +74,21 @@
+ self.check('md5', 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789',
+ 'd174ab98d277d9f5a5611c2c9f419d9f')
+
++ @precisionbigmemtest(size=_4G + 5, memuse=1)
++ def test_case_md5_huge(self, size):
++ if size == _4G + 5:
++ try:
++ self.check('md5', 'A'*size, 'c9af2dff37468ce5dfee8f2cfc0a9c6d')
++ except OverflowError:
++ pass # 32-bit arch
++
++ @precisionbigmemtest(size=_4G - 1, memuse=1)
++ def test_case_md5_uintmax(self, size):
++ if size == _4G - 1:
++ try:
++ self.check('md5', 'A'*size, '28138d306ff1b8281f1a9067e1a1a2b3')
++ except OverflowError:
++ pass # 32-bit arch
+
+ # use the three examples from Federal Information Processing Standards
+ # Publication 180-1, Secure Hash Standard, 1995 April 17
diff --git a/lang/python31/files/patch-lib-test_test_strop.py b/lang/python31/files/patch-lib-test_test_strop.py
new file mode 100644
index 000000000000..f0e40166957a
--- /dev/null
+++ b/lang/python31/files/patch-lib-test_test_strop.py
@@ -0,0 +1,28 @@
+--- Lib/test/test_strop.py.orig 2002-07-31 00:27:12.000000000 +0100
++++ Lib/test/test_strop.py 2008-08-30 10:16:13.000000000 +0100
+@@ -115,6 +115,25 @@
+ strop.uppercase
+ strop.whitespace
+
++ @test_support.precisionbigmemtest(size=test_support._2G - 1, memuse=5)
++ def test_stropjoin_huge_list(self, size):
++ a = "A" * size
++ try:
++ r = strop.join([a, a], a)
++ except OverflowError:
++ pass
++ else:
++ self.assertEquals(len(r), len(a) * 3)
++
++ @test_support.precisionbigmemtest(size=test_support._2G - 1, memuse=1)
++ def test_stropjoin_huge_tup(self, size):
++ a = "A" * size
++ try:
++ r = strop.join((a, a), a)
++ except OverflowError:
++ pass # acceptable on 32-bit
++ else:
++ self.assertEquals(len(r), len(a) * 3)
+
+ transtable = '\000\001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037 !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`xyzdefghijklmnopqrstuvwxyz{|}~\177\200\201\202\203\204\205\206\207\210\211\212\213\214\215\216\217\220\221\222\223\224\225\226\227\230\231\232\233\234\235\236\237\240\241\242\243\244\245\246\247\250\251\252\253\254\255\256\257\260\261\262\263\264\265\266\267\270\271\272\273\274\275\276\277\300\301\302\303\304\305\306\307\310\311\312\313\314\315\316\317\320\321\322\323\324\325\326\327\330\331\332\333\334\335\336\337\340\341\342\343\344\345\346\347\350\351\352\353\354\355\356\357\360\361\362\363\364\365\366\367\370\371\372\373\374\375\376\377'
+
diff --git a/lang/python31/files/patch-lib-test_test_support.py b/lang/python31/files/patch-lib-test_test_support.py
new file mode 100644
index 000000000000..b11933bc7eed
--- /dev/null
+++ b/lang/python31/files/patch-lib-test_test_support.py
@@ -0,0 +1,62 @@
+--- Lib/test/test_support.py.orig 2008-01-27 01:24:44.000000000 +0000
++++ Lib/test/test_support.py 2008-08-30 10:16:13.000000000 +0100
+@@ -33,6 +33,7 @@
+ use_resources = None # Flag set to [] by regrtest.py
+ max_memuse = 0 # Disable bigmem tests (they will still be run with
+ # small sizes, to make sure they work.)
++real_max_memuse = 0
+
+ # _original_stdout is meant to hold stdout at the time regrtest began.
+ # This may be "the real" stdout, or IDLE's emulation of stdout, or whatever.
+@@ -323,6 +324,7 @@
+ _1M = 1024*1024
+ _1G = 1024 * _1M
+ _2G = 2 * _1G
++_4G = 4 * _1G
+
+ # Hack to get at the maximum value an internal index can take.
+ class _Dummy:
+@@ -333,6 +335,7 @@
+ def set_memlimit(limit):
+ import re
+ global max_memuse
++ global real_max_memuse
+ sizes = {
+ 'k': 1024,
+ 'm': _1M,
+@@ -344,6 +347,7 @@
+ if m is None:
+ raise ValueError('Invalid memory limit %r' % (limit,))
+ memlimit = int(float(m.group(1)) * sizes[m.group(3).lower()])
++ real_max_memuse = memlimit
+ if memlimit > MAX_Py_ssize_t:
+ memlimit = MAX_Py_ssize_t
+ if memlimit < _2G - 1:
+@@ -389,6 +393,27 @@
+ return wrapper
+ return decorator
+
++def precisionbigmemtest(size, memuse, overhead=5*_1M):
++ def decorator(f):
++ def wrapper(self):
++ if not real_max_memuse:
++ maxsize = 5147
++ else:
++ maxsize = size
++
++ if real_max_memuse and real_max_memuse < maxsize * memuse:
++ if verbose:
++ sys.stderr.write("Skipping %s because of memory "
++ "constraint\n" % (f.__name__,))
++ return
++
++ return f(self, maxsize)
++ wrapper.size = size
++ wrapper.memuse = memuse
++ wrapper.overhead = overhead
++ return wrapper
++ return decorator
++
+ def bigaddrspacetest(f):
+ """Decorator for tests that fill the address space."""
+ def wrapper(self):
diff --git a/lang/python31/files/patch-lib_seq_tests.py b/lang/python31/files/patch-lib_seq_tests.py
new file mode 100644
index 000000000000..9be35ae82517
--- /dev/null
+++ b/lang/python31/files/patch-lib_seq_tests.py
@@ -0,0 +1,21 @@
+--- Lib/test/seq_tests.py.orig 2007-11-12 20:04:41.000000000 +0000
++++ Lib/test/seq_tests.py 2008-08-30 10:16:13.000000000 +0100
+@@ -307,11 +307,13 @@
+ self.assertEqual(id(s), id(s*1))
+
+ def test_bigrepeat(self):
+- x = self.type2test([0])
+- x *= 2**16
+- self.assertRaises(MemoryError, x.__mul__, 2**16)
+- if hasattr(x, '__imul__'):
+- self.assertRaises(MemoryError, x.__imul__, 2**16)
++ import sys
++ if sys.maxint <= 2147483647:
++ x = self.type2test([0])
++ x *= 2**16
++ self.assertRaises(MemoryError, x.__mul__, 2**16)
++ if hasattr(x, '__imul__'):
++ self.assertRaises(MemoryError, x.__imul__, 2**16)
+
+ def test_subscript(self):
+ a = self.type2test([10, 11])
diff --git a/lang/python31/files/patch-modules_almodule.c b/lang/python31/files/patch-modules_almodule.c
new file mode 100644
index 000000000000..8e3b9b11380d
--- /dev/null
+++ b/lang/python31/files/patch-modules_almodule.c
@@ -0,0 +1,14 @@
+--- Modules/almodule.c.orig 2006-09-25 07:53:42.000000000 +0100
++++ Modules/almodule.c 2008-08-30 10:39:43.000000000 +0100
+@@ -1633,9 +1633,11 @@
+ if (nvals < 0)
+ goto cleanup;
+ if (nvals > setsize) {
++ ALvalue *old_return_set = return_set;
+ setsize = nvals;
+ PyMem_RESIZE(return_set, ALvalue, setsize);
+ if (return_set == NULL) {
++ return_set = old_return_set;
+ PyErr_NoMemory();
+ goto cleanup;
+ }
diff --git a/lang/python31/files/patch-modules_arraymodule.c b/lang/python31/files/patch-modules_arraymodule.c
new file mode 100644
index 000000000000..738ee48f1485
--- /dev/null
+++ b/lang/python31/files/patch-modules_arraymodule.c
@@ -0,0 +1,33 @@
+--- Modules/arraymodule.c.orig 2008-02-15 19:11:46.000000000 +0000
++++ Modules/arraymodule.c 2008-08-30 10:39:43.000000000 +0100
+@@ -816,6 +816,7 @@
+ array_do_extend(arrayobject *self, PyObject *bb)
+ {
+ Py_ssize_t size;
++ char *old_item;
+
+ if (!array_Check(bb))
+ return array_iter_extend(self, bb);
+@@ -831,10 +832,11 @@
+ return -1;
+ }
+ size = self->ob_size + b->ob_size;
++ old_item = self->ob_item;
+ PyMem_RESIZE(self->ob_item, char, size*self->ob_descr->itemsize);
+ if (self->ob_item == NULL) {
+- PyObject_Del(self);
+- PyErr_NoMemory();
++ self->ob_item = old_item;
++ PyErr_NoMemory();
+ return -1;
+ }
+ memcpy(self->ob_item + self->ob_size*self->ob_descr->itemsize,
+@@ -886,7 +888,7 @@
+ if (size > PY_SSIZE_T_MAX / n) {
+ return PyErr_NoMemory();
+ }
+- PyMem_Resize(items, char, n * size);
++ PyMem_RESIZE(items, char, n * size);
+ if (items == NULL)
+ return PyErr_NoMemory();
+ p = items;
diff --git a/lang/python31/files/patch-modules_gcmodule.c b/lang/python31/files/patch-modules_gcmodule.c
new file mode 100644
index 000000000000..59253930e9c3
--- /dev/null
+++ b/lang/python31/files/patch-modules_gcmodule.c
@@ -0,0 +1,58 @@
+--- Include/pymem.h.orig 2008-02-14 11:26:18.000000000 +0000
++++ Include/pymem.h 2008-08-30 10:39:43.000000000 +0100
+@@ -67,8 +67,12 @@
+ for malloc(0), which would be treated as an error. Some platforms
+ would return a pointer with no memory behind it, which would break
+ pymalloc. To solve these problems, allocate an extra byte. */
+-#define PyMem_MALLOC(n) malloc((n) ? (n) : 1)
+-#define PyMem_REALLOC(p, n) realloc((p), (n) ? (n) : 1)
++/* Returns NULL to indicate error if a negative size or size larger than
++ Py_ssize_t can represent is supplied. Helps prevents security holes. */
++#define PyMem_MALLOC(n) (((n) < 0 || (n) > PY_SSIZE_T_MAX) ? NULL \
++ : malloc((n) ? (n) : 1))
++#define PyMem_REALLOC(p, n) (((n) < 0 || (n) > PY_SSIZE_T_MAX) ? NULL \
++ : realloc((p), (n) ? (n) : 1))
+ #define PyMem_FREE free
+
+ #endif /* PYMALLOC_DEBUG */
+@@ -77,24 +81,31 @@
+ * Type-oriented memory interface
+ * ==============================
+ *
+- * These are carried along for historical reasons. There's rarely a good
+- * reason to use them anymore (you can just as easily do the multiply and
+- * cast yourself).
++ * Allocate memory for n objects of the given type. Returns a new pointer
++ * or NULL if the request was too large or memory allocation failed. Use
++ * these macros rather than doing the multiplication yourself so that proper
++ * overflow checking is always done.
+ */
+
+ #define PyMem_New(type, n) \
+- ( assert((n) <= PY_SIZE_MAX / sizeof(type)) , \
++ ( ((n) > PY_SSIZE_T_MAX / sizeof(type)) ? NULL : \
+ ( (type *) PyMem_Malloc((n) * sizeof(type)) ) )
+ #define PyMem_NEW(type, n) \
+- ( assert((n) <= PY_SIZE_MAX / sizeof(type)) , \
++ ( ((n) > PY_SSIZE_T_MAX / sizeof(type)) ? NULL : \
+ ( (type *) PyMem_MALLOC((n) * sizeof(type)) ) )
+
++/*
++ * The value of (p) is always clobbered by this macro regardless of success.
++ * The caller MUST check if (p) is NULL afterwards and deal with the memory
++ * error if so. This means the original value of (p) MUST be saved for the
++ * caller's memory error handler to not lose track of it.
++ */
+ #define PyMem_Resize(p, type, n) \
+- ( assert((n) <= PY_SIZE_MAX / sizeof(type)) , \
+- ( (p) = (type *) PyMem_Realloc((p), (n) * sizeof(type)) ) )
++ ( (p) = ((n) > PY_SSIZE_T_MAX / sizeof(type)) ? NULL : \
++ (type *) PyMem_Realloc((p), (n) * sizeof(type)) )
+ #define PyMem_RESIZE(p, type, n) \
+- ( assert((n) <= PY_SIZE_MAX / sizeof(type)) , \
+- ( (p) = (type *) PyMem_REALLOC((p), (n) * sizeof(type)) ) )
++ ( (p) = ((n) > PY_SSIZE_T_MAX / sizeof(type)) ? NULL : \
++ (type *) PyMem_REALLOC((p), (n) * sizeof(type)) )
+
+ /* PyMem{Del,DEL} are left over from ancient days, and shouldn't be used
+ * anymore. They're just confusing aliases for PyMem_{Free,FREE} now.
diff --git a/lang/python31/files/patch-modules_hashopenssl.c b/lang/python31/files/patch-modules_hashopenssl.c
new file mode 100644
index 000000000000..1f0cd56a244a
--- /dev/null
+++ b/lang/python31/files/patch-modules_hashopenssl.c
@@ -0,0 +1,104 @@
+--- Modules/_hashopenssl.c.orig 2006-05-29 22:04:52.000000000 +0100
++++ Modules/_hashopenssl.c 2008-08-30 10:43:27.000000000 +0100
+@@ -19,6 +19,8 @@
+ /* EVP is the preferred interface to hashing in OpenSSL */
+ #include <openssl/evp.h>
+
++#define MUNCH_SIZE INT_MAX
++
+
+ #ifndef HASH_OBJ_CONSTRUCTOR
+ #define HASH_OBJ_CONSTRUCTOR 0
+@@ -164,9 +166,18 @@
+ if (!PyArg_ParseTuple(args, "s#:update", &cp, &len))
+ return NULL;
+
++ if (len > 0 && len <= MUNCH_SIZE) {
+ EVP_DigestUpdate(&self->ctx, cp, Py_SAFE_DOWNCAST(len, Py_ssize_t,
+ unsigned int));
+-
++ } else {
++ Py_ssize_t offset = 0;
++ while (len) {
++ unsigned int process = len > MUNCH_SIZE ? MUNCH_SIZE : len;
++ EVP_DigestUpdate(&self->ctx, cp + offset, process);
++ len -= process;
++ offset += process;
++ }
++ }
+ Py_INCREF(Py_None);
+ return Py_None;
+ }
+@@ -255,9 +266,20 @@
+ self->name = name_obj;
+ Py_INCREF(self->name);
+
+- if (cp && len)
++ if (cp && len) {
++ if (len > 0 && len <= MUNCH_SIZE) {
+ EVP_DigestUpdate(&self->ctx, cp, Py_SAFE_DOWNCAST(len, Py_ssize_t,
+ unsigned int));
++ } else {
++ Py_ssize_t offset = 0;
++ while (len) {
++ unsigned int process = len > MUNCH_SIZE ? MUNCH_SIZE : len;
++ EVP_DigestUpdate(&self->ctx, cp + offset, process);
++ len -= process;
++ offset += process;
++ }
++ }
++ }
+
+ return 0;
+ }
+@@ -328,7 +350,7 @@
+ static PyObject *
+ EVPnew(PyObject *name_obj,
+ const EVP_MD *digest, const EVP_MD_CTX *initial_ctx,
+- const unsigned char *cp, unsigned int len)
++ const unsigned char *cp, Py_ssize_t len)
+ {
+ EVPobject *self;
+
+@@ -346,8 +368,20 @@
+ EVP_DigestInit(&self->ctx, digest);
+ }
+
+- if (cp && len)
+- EVP_DigestUpdate(&self->ctx, cp, len);
++ if (cp && len) {
++ if (len > 0 && len <= MUNCH_SIZE) {
++ EVP_DigestUpdate(&self->ctx, cp, Py_SAFE_DOWNCAST(len, Py_ssize_t,
++ unsigned int));
++ } else {
++ Py_ssize_t offset = 0;
++ while (len) {
++ unsigned int process = len > MUNCH_SIZE ? MUNCH_SIZE : len;
++ EVP_DigestUpdate(&self->ctx, cp + offset, process);
++ len -= process;
++ offset += process;
++ }
++ }
++ }
+
+ return (PyObject *)self;
+ }
+@@ -384,8 +418,7 @@
+
+ digest = EVP_get_digestbyname(name);
+
+- return EVPnew(name_obj, digest, NULL, cp, Py_SAFE_DOWNCAST(len, Py_ssize_t,
+- unsigned int));
++ return EVPnew(name_obj, digest, NULL, cp, len);
+ }
+
+ /*
+@@ -410,7 +443,7 @@
+ CONST_ ## NAME ## _name_obj, \
+ NULL, \
+ CONST_new_ ## NAME ## _ctx_p, \
+- cp, Py_SAFE_DOWNCAST(len, Py_ssize_t, unsigned int)); \
++ cp, len); \
+ }
+
+ /* a PyMethodDef structure for the constructor */
diff --git a/lang/python31/files/patch-modules_mmapmodule.c b/lang/python31/files/patch-modules_mmapmodule.c
new file mode 100644
index 000000000000..60f3d71ff349
--- /dev/null
+++ b/lang/python31/files/patch-modules_mmapmodule.c
@@ -0,0 +1,11 @@
+--- Modules/mmapmodule.c.orig 2006-08-22 14:57:07.000000000 +0100
++++ Modules/mmapmodule.c 2008-08-30 10:16:13.000000000 +0100
+@@ -223,7 +223,7 @@
+ return(NULL);
+
+ /* silently 'adjust' out-of-range requests */
+- if ((self->pos + num_bytes) > self->size) {
++ if (num_bytes > self->size - self->pos) {
+ num_bytes -= (self->pos+num_bytes) - self->size;
+ }
+ result = Py_BuildValue("s#", self->data+self->pos, num_bytes);
diff --git a/lang/python31/files/patch-modules_selectmodule.c b/lang/python31/files/patch-modules_selectmodule.c
new file mode 100644
index 000000000000..446241f05a97
--- /dev/null
+++ b/lang/python31/files/patch-modules_selectmodule.c
@@ -0,0 +1,16 @@
+--- Modules/selectmodule.c.orig 2006-07-10 02:18:57.000000000 +0100
++++ Modules/selectmodule.c 2008-08-30 10:39:43.000000000 +0100
+@@ -349,10 +349,12 @@
+ {
+ Py_ssize_t i, pos;
+ PyObject *key, *value;
++ struct pollfd *old_ufds = self->ufds;
+
+ self->ufd_len = PyDict_Size(self->dict);
+- PyMem_Resize(self->ufds, struct pollfd, self->ufd_len);
++ PyMem_RESIZE(self->ufds, struct pollfd, self->ufd_len);
+ if (self->ufds == NULL) {
++ self->ufds = old_ufds;
+ PyErr_NoMemory();
+ return 0;
+ }
diff --git a/lang/python31/files/patch-modules_stropmodule.c b/lang/python31/files/patch-modules_stropmodule.c
new file mode 100644
index 000000000000..d7f42ce18871
--- /dev/null
+++ b/lang/python31/files/patch-modules_stropmodule.c
@@ -0,0 +1,31 @@
+--- Modules/stropmodule.c.orig 2008-02-14 11:26:18.000000000 +0000
++++ Modules/stropmodule.c 2008-08-30 10:16:13.000000000 +0100
+@@ -216,6 +216,13 @@
+ return NULL;
+ }
+ slen = PyString_GET_SIZE(item);
++ if (slen > PY_SSIZE_T_MAX - reslen ||
++ seplen > PY_SSIZE_T_MAX - reslen - seplen) {
++ PyErr_SetString(PyExc_OverflowError,
++ "input too long");
++ Py_DECREF(res);
++ return NULL;
++ }
+ while (reslen + slen + seplen >= sz) {
+ if (_PyString_Resize(&res, sz * 2) < 0)
+ return NULL;
+@@ -253,6 +260,14 @@
+ return NULL;
+ }
+ slen = PyString_GET_SIZE(item);
++ if (slen > PY_SSIZE_T_MAX - reslen ||
++ seplen > PY_SSIZE_T_MAX - reslen - seplen) {
++ PyErr_SetString(PyExc_OverflowError,
++ "input too long");
++ Py_DECREF(res);
++ Py_XDECREF(item);
++ return NULL;
++ }
+ while (reslen + slen + seplen >= sz) {
+ if (_PyString_Resize(&res, sz * 2) < 0) {
+ Py_DECREF(item);
diff --git a/lang/python31/files/patch-objects_bufferobject.c b/lang/python31/files/patch-objects_bufferobject.c
new file mode 100644
index 000000000000..16e99568ad9a
--- /dev/null
+++ b/lang/python31/files/patch-objects_bufferobject.c
@@ -0,0 +1,13 @@
+--- Objects/bufferobject.c.orig 2008-02-14 11:26:18.000000000 +0000
++++ Objects/bufferobject.c 2008-08-30 10:16:13.000000000 +0100
+@@ -427,6 +427,10 @@
+ count = 0;
+ if (!get_buf(self, &ptr, &size, ANY_BUFFER))
+ return NULL;
++ if (count > PY_SSIZE_T_MAX / size) {
++ PyErr_SetString(PyExc_MemoryError, "result too large");
++ return NULL;
++ }
+ ob = PyString_FromStringAndSize(NULL, size * count);
+ if ( ob == NULL )
+ return NULL;
diff --git a/lang/python31/files/patch-objects_longobject.c b/lang/python31/files/patch-objects_longobject.c
new file mode 100644
index 000000000000..1221db9fed0a
--- /dev/null
+++ b/lang/python31/files/patch-objects_longobject.c
@@ -0,0 +1,11 @@
+--- Objects/longobject.c.orig 2007-05-07 19:30:48.000000000 +0100
++++ Objects/longobject.c 2008-08-30 10:16:13.000000000 +0100
+@@ -70,6 +70,8 @@
+ PyErr_NoMemory();
+ return NULL;
+ }
++ /* XXX(nnorwitz): This can overflow --
++ PyObject_NEW_VAR / _PyObject_VAR_SIZE need to detect overflow */
+ return PyObject_NEW_VAR(PyLongObject, &PyLong_Type, size);
+ }
+
diff --git a/lang/python31/files/patch-objects_obmalloc.c b/lang/python31/files/patch-objects_obmalloc.c
new file mode 100644
index 000000000000..27050596fbfc
--- /dev/null
+++ b/lang/python31/files/patch-objects_obmalloc.c
@@ -0,0 +1,34 @@
+--- Objects/obmalloc.c.orig 2008-02-14 11:26:18.000000000 +0000
++++ Objects/obmalloc.c 2008-08-30 10:39:43.000000000 +0100
+@@ -727,6 +727,15 @@
+ uint size;
+
+ /*
++ * Limit ourselves to PY_SSIZE_T_MAX bytes to prevent security holes.
++ * Most python internals blindly use a signed Py_ssize_t to track
++ * things without checking for overflows or negatives.
++ * As size_t is unsigned, checking for nbytes < 0 is not required.
++ */
++ if (nbytes > PY_SSIZE_T_MAX)
++ return NULL;
++
++ /*
+ * This implicitly redirects malloc(0).
+ */
+ if ((nbytes - 1) < SMALL_REQUEST_THRESHOLD) {
+@@ -1130,6 +1139,15 @@
+ if (p == NULL)
+ return PyObject_Malloc(nbytes);
+
++ /*
++ * Limit ourselves to PY_SSIZE_T_MAX bytes to prevent security holes.
++ * Most python internals blindly use a signed Py_ssize_t to track
++ * things without checking for overflows or negatives.
++ * As size_t is unsigned, checking for nbytes < 0 is not required.
++ */
++ if (nbytes > PY_SSIZE_T_MAX)
++ return NULL;
++
+ pool = POOL_ADDR(p);
+ if (Py_ADDRESS_IN_RANGE(p, pool)) {
+ /* We're in charge of this block */
diff --git a/lang/python31/files/patch-objects_stringobject.c b/lang/python31/files/patch-objects_stringobject.c
new file mode 100644
index 000000000000..af55c78a4928
--- /dev/null
+++ b/lang/python31/files/patch-objects_stringobject.c
@@ -0,0 +1,49 @@
+--- Objects/stringobject.c.orig 2007-11-07 01:19:49.000000000 +0000
++++ Objects/stringobject.c 2008-08-30 10:16:13.000000000 +0100
+@@ -71,6 +71,11 @@
+ return (PyObject *)op;
+ }
+
++ if (size > PY_SSIZE_T_MAX - sizeof(PyStringObject)) {
++ PyErr_SetString(PyExc_OverflowError, "string is too large");
++ return NULL;
++ }
++
+ /* Inline PyObject_NewVar */
+ op = (PyStringObject *)PyObject_MALLOC(sizeof(PyStringObject) + size);
+ if (op == NULL)
+@@ -106,7 +111,7 @@
+
+ assert(str != NULL);
+ size = strlen(str);
+- if (size > PY_SSIZE_T_MAX) {
++ if (size > PY_SSIZE_T_MAX - sizeof(PyStringObject)) {
+ PyErr_SetString(PyExc_OverflowError,
+ "string is too long for a Python string");
+ return NULL;
+@@ -967,14 +972,24 @@
+ Py_INCREF(a);
+ return (PyObject *)a;
+ }
++ /* Check that string sizes are not negative, to prevent an
++ overflow in cases where we are passed incorrectly-created
++ strings with negative lengths (due to a bug in other code).
++ */
+ size = a->ob_size + b->ob_size;
+- if (size < 0) {
++ if (a->ob_size < 0 || b->ob_size < 0 ||
++ a->ob_size > PY_SSIZE_T_MAX - b->ob_size) {
+ PyErr_SetString(PyExc_OverflowError,
+ "strings are too large to concat");
+ return NULL;
+ }
+
+ /* Inline PyObject_NewVar */
++ if (size > PY_SSIZE_T_MAX - sizeof(PyStringObject)) {
++ PyErr_SetString(PyExc_OverflowError,
++ "strings are too large to concat");
++ return NULL;
++ }
+ op = (PyStringObject *)PyObject_MALLOC(sizeof(PyStringObject) + size);
+ if (op == NULL)
+ return PyErr_NoMemory();
diff --git a/lang/python31/files/patch-objects_tupleobject.c b/lang/python31/files/patch-objects_tupleobject.c
new file mode 100644
index 000000000000..eb133b6e002c
--- /dev/null
+++ b/lang/python31/files/patch-objects_tupleobject.c
@@ -0,0 +1,17 @@
+--- Objects/tupleobject.c.orig 2006-08-12 18:03:09.000000000 +0100
++++ Objects/tupleobject.c 2008-08-30 10:16:13.000000000 +0100
+@@ -60,11 +60,12 @@
+ Py_ssize_t nbytes = size * sizeof(PyObject *);
+ /* Check for overflow */
+ if (nbytes / sizeof(PyObject *) != (size_t)size ||
+- (nbytes += sizeof(PyTupleObject) - sizeof(PyObject *))
+- <= 0)
++ (nbytes > PY_SSIZE_T_MAX - sizeof(PyTupleObject) - sizeof(PyObject *)))
+ {
+ return PyErr_NoMemory();
+ }
++ nbytes += sizeof(PyTupleObject) - sizeof(PyObject *);
++
+ op = PyObject_GC_NewVar(PyTupleObject, &PyTuple_Type, size);
+ if (op == NULL)
+ return NULL;
diff --git a/lang/python31/files/patch-objects_unicodeobject.c b/lang/python31/files/patch-objects_unicodeobject.c
new file mode 100644
index 000000000000..85e88caae0e3
--- /dev/null
+++ b/lang/python31/files/patch-objects_unicodeobject.c
@@ -0,0 +1,115 @@
+--- Objects/unicodeobject.c.orig 2007-11-02 22:46:38.000000000 +0000
++++ Objects/unicodeobject.c 2008-08-30 10:16:13.000000000 +0100
+@@ -239,6 +239,11 @@
+ return unicode_empty;
+ }
+
++ /* Ensure we won't overflow the size. */
++ if (length > ((PY_SSIZE_T_MAX / sizeof(Py_UNICODE)) - 1)) {
++ return (PyUnicodeObject *)PyErr_NoMemory();
++ }
++
+ /* Unicode freelist & memory allocation */
+ if (unicode_freelist) {
+ unicode = unicode_freelist;
+@@ -1091,6 +1096,9 @@
+ char * out;
+ char * start;
+
++ if (cbAllocated / 5 != size)
++ return PyErr_NoMemory();
++
+ if (size == 0)
+ return PyString_FromStringAndSize(NULL, 0);
+
+@@ -1689,8 +1697,9 @@
+ {
+ PyObject *v;
+ unsigned char *p;
++ Py_ssize_t nsize, bytesize;
+ #ifdef Py_UNICODE_WIDE
+- int i, pairs;
++ Py_ssize_t i, pairs;
+ #else
+ const int pairs = 0;
+ #endif
+@@ -1713,8 +1722,15 @@
+ if (s[i] >= 0x10000)
+ pairs++;
+ #endif
+- v = PyString_FromStringAndSize(NULL,
+- 2 * (size + pairs + (byteorder == 0)));
++ /* 2 * (size + pairs + (byteorder == 0)) */
++ if (size > PY_SSIZE_T_MAX ||
++ size > PY_SSIZE_T_MAX - pairs - (byteorder == 0))
++ return PyErr_NoMemory();
++ nsize = (size + pairs + (byteorder == 0));
++ bytesize = nsize * 2;
++ if (bytesize / 2 != nsize)
++ return PyErr_NoMemory();
++ v = PyString_FromStringAndSize(NULL, bytesize);
+ if (v == NULL)
+ return NULL;
+
+@@ -2042,6 +2058,11 @@
+ char *p;
+
+ static const char *hexdigit = "0123456789abcdef";
++#ifdef Py_UNICODE_WIDE
++ const Py_ssize_t expandsize = 10;
++#else
++ const Py_ssize_t expandsize = 6;
++#endif
+
+ /* Initial allocation is based on the longest-possible unichr
+ escape.
+@@ -2057,13 +2078,12 @@
+ escape.
+ */
+
++ if (size > (PY_SSIZE_T_MAX - 2 - 1) / expandsize)
++ return PyErr_NoMemory();
++
+ repr = PyString_FromStringAndSize(NULL,
+ 2
+-#ifdef Py_UNICODE_WIDE
+- + 10*size
+-#else
+- + 6*size
+-#endif
++ + expandsize*size
+ + 1);
+ if (repr == NULL)
+ return NULL;
+@@ -2304,12 +2324,16 @@
+ char *q;
+
+ static const char *hexdigit = "0123456789abcdef";
+-
+ #ifdef Py_UNICODE_WIDE
+- repr = PyString_FromStringAndSize(NULL, 10 * size);
++ const Py_ssize_t expandsize = 10;
+ #else
+- repr = PyString_FromStringAndSize(NULL, 6 * size);
++ const Py_ssize_t expandsize = 6;
+ #endif
++
++ if (size > PY_SSIZE_T_MAX / expandsize)
++ return PyErr_NoMemory();
++
++ repr = PyString_FromStringAndSize(NULL, expandsize * size);
+ if (repr == NULL)
+ return NULL;
+ if (size == 0)
+@@ -4719,6 +4743,11 @@
+ return self;
+ }
+
++ if (left > PY_SSIZE_T_MAX - self->length ||
++ right > PY_SSIZE_T_MAX - (left + self->length)) {
++ PyErr_SetString(PyExc_OverflowError, "padded string is too long");
++ return NULL;
++ }
+ u = _PyUnicode_New(left + self->length + right);
+ if (u) {
+ if (left)
diff --git a/lang/python31/files/patch-python_mysnprintf.c b/lang/python31/files/patch-python_mysnprintf.c
new file mode 100644
index 000000000000..276dd21a1b31
--- /dev/null
+++ b/lang/python31/files/patch-python_mysnprintf.c
@@ -0,0 +1,55 @@
+--- Python/mysnprintf.c.orig 2001-12-21 16:32:15.000000000 +0000
++++ Python/mysnprintf.c 2008-08-30 10:46:31.000000000 +0100
+@@ -54,18 +54,28 @@
+ PyOS_vsnprintf(char *str, size_t size, const char *format, va_list va)
+ {
+ int len; /* # bytes written, excluding \0 */
+-#ifndef HAVE_SNPRINTF
++#ifdef HAVE_SNPRINTF
++#define _PyOS_vsnprintf_EXTRA_SPACE 1
++#else
++#define _PyOS_vsnprintf_EXTRA_SPACE 512
+ char *buffer;
+ #endif
+ assert(str != NULL);
+ assert(size > 0);
+ assert(format != NULL);
++ /* We take a size_t as input but return an int. Sanity check
++ * our input so that it won't cause an overflow in the
++ * vsnprintf return value or the buffer malloc size. */
++ if (size > INT_MAX - _PyOS_vsnprintf_EXTRA_SPACE) {
++ len = -666;
++ goto Done;
++ }
+
+ #ifdef HAVE_SNPRINTF
+ len = vsnprintf(str, size, format, va);
+ #else
+ /* Emulate it. */
+- buffer = PyMem_MALLOC(size + 512);
++ buffer = PyMem_MALLOC(size + _PyOS_vsnprintf_EXTRA_SPACE);
+ if (buffer == NULL) {
+ len = -666;
+ goto Done;
+@@ -75,7 +85,7 @@
+ if (len < 0)
+ /* ignore the error */;
+
+- else if ((size_t)len >= size + 512)
++ else if ((size_t)len >= size + _PyOS_vsnprintf_EXTRA_SPACE)
+ Py_FatalError("Buffer overflow in PyOS_snprintf/PyOS_vsnprintf");
+
+ else {
+@@ -86,8 +96,10 @@
+ str[to_copy] = '\0';
+ }
+ PyMem_FREE(buffer);
+-Done:
+ #endif
+- str[size-1] = '\0';
++Done:
++ if (size > 0)
++ str[size-1] = '\0';
+ return len;
++#undef _PyOS_vsnprintf_EXTRA_SPACE
+ }
diff --git a/lang/python32/Makefile b/lang/python32/Makefile
index 6224b163004d..fa6cd7bd7061 100644
--- a/lang/python32/Makefile
+++ b/lang/python32/Makefile
@@ -6,7 +6,7 @@
PORTNAME= python25
PORTVERSION= 2.5.2
-PORTREVISION= 2
+PORTREVISION= 3
CATEGORIES= lang python ipv6
MASTER_SITES= ${PYTHON_MASTER_SITES}
MASTER_SITE_SUBDIR= ${PYTHON_MASTER_SITE_SUBDIR}
diff --git a/lang/python32/files/patch-lib-test_test_bigmem.py b/lang/python32/files/patch-lib-test_test_bigmem.py
new file mode 100644
index 000000000000..5ec5935a046f
--- /dev/null
+++ b/lang/python32/files/patch-lib-test_test_bigmem.py
@@ -0,0 +1,163 @@
+--- Lib/test/test_bigmem.py.orig 2007-11-30 21:53:17.000000000 +0000
++++ Lib/test/test_bigmem.py 2008-08-30 10:16:13.000000000 +0100
+@@ -1,5 +1,5 @@
+ from test import test_support
+-from test.test_support import bigmemtest, _1G, _2G
++from test.test_support import bigmemtest, _1G, _2G, _4G, precisionbigmemtest
+
+ import unittest
+ import operator
+@@ -54,6 +54,22 @@
+ self.assertEquals(s[lpadsize:-rpadsize], SUBSTR)
+ self.assertEquals(s.strip(), SUBSTR.strip())
+
++ @precisionbigmemtest(size=_2G - 1, memuse=1)
++ def test_center_unicode(self, size):
++ SUBSTR = u' abc def ghi'
++ try:
++ s = SUBSTR.center(size)
++ except OverflowError:
++ pass # acceptable on 32-bit
++ else:
++ self.assertEquals(len(s), size)
++ lpadsize = rpadsize = (len(s) - len(SUBSTR)) // 2
++ if len(s) % 2:
++ lpadsize += 1
++ self.assertEquals(s[lpadsize:-rpadsize], SUBSTR)
++ self.assertEquals(s.strip(), SUBSTR.strip())
++ del s
++
+ @bigmemtest(minsize=_2G, memuse=2)
+ def test_count(self, size):
+ SUBSTR = ' abc def ghi'
+@@ -70,10 +86,44 @@
+ s = '.' * size
+ self.assertEquals(len(s.decode('utf-8')), size)
+
++ def basic_encode_test(self, size, enc, c=u'.', expectedsize=None):
++ if expectedsize is None:
++ expectedsize = size
++
++ s = c * size
++ self.assertEquals(len(s.encode(enc)), expectedsize)
++
+ @bigmemtest(minsize=_2G + 2, memuse=3)
+ def test_encode(self, size):
+- s = u'.' * size
+- self.assertEquals(len(s.encode('utf-8')), size)
++ return self.basic_encode_test(size, 'utf-8')
++
++ @precisionbigmemtest(size=_4G / 6 + 2, memuse=2)
++ def test_encode_raw_unicode_escape(self, size):
++ try:
++ return self.basic_encode_test(size, 'raw_unicode_escape')
++ except MemoryError:
++ pass # acceptable on 32-bit
++
++ @precisionbigmemtest(size=_4G / 5 + 70, memuse=3)
++ def test_encode_utf7(self, size):
++ try:
++ return self.basic_encode_test(size, 'utf7')
++ except MemoryError:
++ pass # acceptable on 32-bit
++
++ @precisionbigmemtest(size=_2G-1, memuse=2)
++ def test_decodeascii(self, size):
++ return self.basic_encode_test(size, 'ascii', c='A')
++
++ @precisionbigmemtest(size=_4G / 5, memuse=6+2)
++ def test_unicode_repr_oflw(self, size):
++ try:
++ s = u"\uAAAA"*size
++ r = repr(s)
++ except MemoryError:
++ pass # acceptable on 32-bit
++ else:
++ self.failUnless(s == eval(r))
+
+ @bigmemtest(minsize=_2G, memuse=2)
+ def test_endswith(self, size):
+@@ -459,6 +509,11 @@
+ self.assertEquals(s.count('\\'), size)
+ self.assertEquals(s.count('0'), size * 2)
+
++ @bigmemtest(minsize=2**32 / 5, memuse=6+2)
++ def test_unicode_repr(self, size):
++ s = u"\uAAAA" * size
++ self.failUnless(len(repr(s)) > size)
++
+ # This test is meaningful even with size < 2G, as long as the
+ # doubled string is > 2G (but it tests more if both are > 2G :)
+ @bigmemtest(minsize=_1G + 2, memuse=3)
+@@ -642,6 +697,35 @@
+ def test_repeat_large(self, size):
+ return self.basic_test_repeat(size)
+
++ @bigmemtest(minsize=_1G - 1, memuse=12)
++ def test_repeat_large_2(self, size):
++ return self.basic_test_repeat(size)
++
++ @precisionbigmemtest(size=_1G - 1, memuse=9)
++ def test_from_2G_generator(self, size):
++ try:
++ t = tuple(xrange(size))
++ except MemoryError:
++ pass # acceptable on 32-bit
++ else:
++ count = 0
++ for item in t:
++ self.assertEquals(item, count)
++ count += 1
++ self.assertEquals(count, size)
++
++ @precisionbigmemtest(size=_1G - 25, memuse=9)
++ def test_from_almost_2G_generator(self, size):
++ try:
++ t = tuple(xrange(size))
++ count = 0
++ for item in t:
++ self.assertEquals(item, count)
++ count += 1
++ self.assertEquals(count, size)
++ except MemoryError:
++ pass # acceptable, expected on 32-bit
++
+ # Like test_concat, split in two.
+ def basic_test_repr(self, size):
+ t = (0,) * size
+@@ -957,8 +1041,34 @@
+ self.assertEquals(l[:10], [1] * 10)
+ self.assertEquals(l[-10:], [5] * 10)
+
++class BufferTest(unittest.TestCase):
++
++ @precisionbigmemtest(size=_1G, memuse=4)
++ def test_repeat(self, size):
++ try:
++ b = buffer("AAAA")*size
++ except MemoryError:
++ pass # acceptable on 32-bit
++ else:
++ count = 0
++ for c in b:
++ self.assertEquals(c, 'A')
++ count += 1
++ self.assertEquals(count, size*4)
++
+ def test_main():
+- test_support.run_unittest(StrTest, TupleTest, ListTest)
++ test_support.run_unittest(StrTest, TupleTest, ListTest, BufferTest)
++
++# Expected failures (crashers)
++# del StrTest.test_center_unicode
++del StrTest.test_decodeascii
++# del StrTest.test_encode_utf32
++# del StrTest.test_encode_utf7
++# del StrTest.test_encode_raw_unicode_escape
++#
++# del TupleTest.test_from_2G_generator
++#
++# del BufferTest.test_repeat
+
+ if __name__ == '__main__':
+ if len(sys.argv) > 1:
diff --git a/lang/python25/files/patch-lib-test_test_hashlib.py b/lang/python25/files/patch-lib-test_test_hashlib.py
new file mode 100644
index 000000000000..15ede2b17e8c
--- /dev/null
+++ b/lang/python25/files/patch-lib-test_test_hashlib.py
@@ -0,0 +1,41 @@
+--- Lib/test/test_hashlib.py.orig 2005-08-21 19:45:59.000000000 +0100
++++ Lib/test/test_hashlib.py 2008-08-30 10:43:27.000000000 +0100
+@@ -9,7 +9,7 @@
+ import hashlib
+ import unittest
+ from test import test_support
+-
++from test.test_support import _4G, precisionbigmemtest
+
+ def hexstr(s):
+ import string
+@@ -55,7 +55,6 @@
+ m2.update(aas + bees + cees)
+ self.assertEqual(m1.digest(), m2.digest())
+
+-
+ def check(self, name, data, digest):
+ # test the direct constructors
+ computed = getattr(hashlib, name)(data).hexdigest()
+@@ -75,6 +74,21 @@
+ self.check('md5', 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789',
+ 'd174ab98d277d9f5a5611c2c9f419d9f')
+
++ @precisionbigmemtest(size=_4G + 5, memuse=1)
++ def test_case_md5_huge(self, size):
++ if size == _4G + 5:
++ try:
++ self.check('md5', 'A'*size, 'c9af2dff37468ce5dfee8f2cfc0a9c6d')
++ except OverflowError:
++ pass # 32-bit arch
++
++ @precisionbigmemtest(size=_4G - 1, memuse=1)
++ def test_case_md5_uintmax(self, size):
++ if size == _4G - 1:
++ try:
++ self.check('md5', 'A'*size, '28138d306ff1b8281f1a9067e1a1a2b3')
++ except OverflowError:
++ pass # 32-bit arch
+
+ # use the three examples from Federal Information Processing Standards
+ # Publication 180-1, Secure Hash Standard, 1995 April 17
diff --git a/lang/python25/files/patch-lib-test_test_strop.py b/lang/python25/files/patch-lib-test_test_strop.py
new file mode 100644
index 000000000000..f0e40166957a
--- /dev/null
+++ b/lang/python25/files/patch-lib-test_test_strop.py
@@ -0,0 +1,28 @@
+--- Lib/test/test_strop.py.orig 2002-07-31 00:27:12.000000000 +0100
++++ Lib/test/test_strop.py 2008-08-30 10:16:13.000000000 +0100
+@@ -115,6 +115,25 @@
+ strop.uppercase
+ strop.whitespace
+
++ @test_support.precisionbigmemtest(size=test_support._2G - 1, memuse=5)
++ def test_stropjoin_huge_list(self, size):
++ a = "A" * size
++ try:
++ r = strop.join([a, a], a)
++ except OverflowError:
++ pass
++ else:
++ self.assertEquals(len(r), len(a) * 3)
++
++ @test_support.precisionbigmemtest(size=test_support._2G - 1, memuse=1)
++ def test_stropjoin_huge_tup(self, size):
++ a = "A" * size
++ try:
++ r = strop.join((a, a), a)
++ except OverflowError:
++ pass # acceptable on 32-bit
++ else:
++ self.assertEquals(len(r), len(a) * 3)
+
+ transtable = '\000\001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037 !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`xyzdefghijklmnopqrstuvwxyz{|}~\177\200\201\202\203\204\205\206\207\210\211\212\213\214\215\216\217\220\221\222\223\224\225\226\227\230\231\232\233\234\235\236\237\240\241\242\243\244\245\246\247\250\251\252\253\254\255\256\257\260\261\262\263\264\265\266\267\270\271\272\273\274\275\276\277\300\301\302\303\304\305\306\307\310\311\312\313\314\315\316\317\320\321\322\323\324\325\326\327\330\331\332\333\334\335\336\337\340\341\342\343\344\345\346\347\350\351\352\353\354\355\356\357\360\361\362\363\364\365\366\367\370\371\372\373\374\375\376\377'
+
diff --git a/lang/python25/files/patch-lib-test_test_support.py b/lang/python25/files/patch-lib-test_test_support.py
new file mode 100644
index 000000000000..b11933bc7eed
--- /dev/null
+++ b/lang/python25/files/patch-lib-test_test_support.py
@@ -0,0 +1,62 @@
+--- Lib/test/test_support.py.orig 2008-01-27 01:24:44.000000000 +0000
++++ Lib/test/test_support.py 2008-08-30 10:16:13.000000000 +0100
+@@ -33,6 +33,7 @@
+ use_resources = None # Flag set to [] by regrtest.py
+ max_memuse = 0 # Disable bigmem tests (they will still be run with
+ # small sizes, to make sure they work.)
++real_max_memuse = 0
+
+ # _original_stdout is meant to hold stdout at the time regrtest began.
+ # This may be "the real" stdout, or IDLE's emulation of stdout, or whatever.
+@@ -323,6 +324,7 @@
+ _1M = 1024*1024
+ _1G = 1024 * _1M
+ _2G = 2 * _1G
++_4G = 4 * _1G
+
+ # Hack to get at the maximum value an internal index can take.
+ class _Dummy:
+@@ -333,6 +335,7 @@
+ def set_memlimit(limit):
+ import re
+ global max_memuse
++ global real_max_memuse
+ sizes = {
+ 'k': 1024,
+ 'm': _1M,
+@@ -344,6 +347,7 @@
+ if m is None:
+ raise ValueError('Invalid memory limit %r' % (limit,))
+ memlimit = int(float(m.group(1)) * sizes[m.group(3).lower()])
++ real_max_memuse = memlimit
+ if memlimit > MAX_Py_ssize_t:
+ memlimit = MAX_Py_ssize_t
+ if memlimit < _2G - 1:
+@@ -389,6 +393,27 @@
+ return wrapper
+ return decorator
+
++def precisionbigmemtest(size, memuse, overhead=5*_1M):
++ def decorator(f):
++ def wrapper(self):
++ if not real_max_memuse:
++ maxsize = 5147
++ else:
++ maxsize = size
++
++ if real_max_memuse and real_max_memuse < maxsize * memuse:
++ if verbose:
++ sys.stderr.write("Skipping %s because of memory "
++ "constraint\n" % (f.__name__,))
++ return
++
++ return f(self, maxsize)
++ wrapper.size = size
++ wrapper.memuse = memuse
++ wrapper.overhead = overhead
++ return wrapper
++ return decorator
++
+ def bigaddrspacetest(f):
+ """Decorator for tests that fill the address space."""
+ def wrapper(self):
diff --git a/lang/python25/files/patch-lib_seq_tests.py b/lang/python25/files/patch-lib_seq_tests.py
new file mode 100644
index 000000000000..9be35ae82517
--- /dev/null
+++ b/lang/python25/files/patch-lib_seq_tests.py
@@ -0,0 +1,21 @@
+--- Lib/test/seq_tests.py.orig 2007-11-12 20:04:41.000000000 +0000
++++ Lib/test/seq_tests.py 2008-08-30 10:16:13.000000000 +0100
+@@ -307,11 +307,13 @@
+ self.assertEqual(id(s), id(s*1))
+
+ def test_bigrepeat(self):
+- x = self.type2test([0])
+- x *= 2**16
+- self.assertRaises(MemoryError, x.__mul__, 2**16)
+- if hasattr(x, '__imul__'):
+- self.assertRaises(MemoryError, x.__imul__, 2**16)
++ import sys
++ if sys.maxint <= 2147483647:
++ x = self.type2test([0])
++ x *= 2**16
++ self.assertRaises(MemoryError, x.__mul__, 2**16)
++ if hasattr(x, '__imul__'):
++ self.assertRaises(MemoryError, x.__imul__, 2**16)
+
+ def test_subscript(self):
+ a = self.type2test([10, 11])
diff --git a/lang/python25/files/patch-modules_almodule.c b/lang/python25/files/patch-modules_almodule.c
new file mode 100644
index 000000000000..8e3b9b11380d
--- /dev/null
+++ b/lang/python25/files/patch-modules_almodule.c
@@ -0,0 +1,14 @@
+--- Modules/almodule.c.orig 2006-09-25 07:53:42.000000000 +0100
++++ Modules/almodule.c 2008-08-30 10:39:43.000000000 +0100
+@@ -1633,9 +1633,11 @@
+ if (nvals < 0)
+ goto cleanup;
+ if (nvals > setsize) {
++ ALvalue *old_return_set = return_set;
+ setsize = nvals;
+ PyMem_RESIZE(return_set, ALvalue, setsize);
+ if (return_set == NULL) {
++ return_set = old_return_set;
+ PyErr_NoMemory();
+ goto cleanup;
+ }
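
The almodule hunk shows the recurring pattern in this update: PyMem_RESIZE writes its result (possibly NULL) back into the pointer variable itself, so the original pointer must be saved first or the buffer leaks and the cleanup path has nothing left to free. A minimal standalone sketch of that save-and-restore idiom, using plain realloc() and a hypothetical RESIZE macro rather than the real CPython one:

    #include <stdio.h>
    #include <stdlib.h>

    /* RESIZE mimics a PyMem_RESIZE-style macro: the realloc() result
     * (possibly NULL) is written back into the pointer variable itself,
     * so the previous value is lost unless the caller saved it first. */
    #define RESIZE(p, type, n) ((p) = (type *)realloc((p), (n) * sizeof(type)))

    int main(void)
    {
        size_t setsize = 4;
        int *return_set = malloc(setsize * sizeof(int));
        int *old_return_set;

        if (return_set == NULL)
            return 1;

        old_return_set = return_set;          /* save before resizing */
        RESIZE(return_set, int, 1024);
        if (return_set == NULL) {
            return_set = old_return_set;      /* restore so cleanup can free it */
            fprintf(stderr, "resize failed, keeping the old buffer\n");
        }

        free(return_set);
        return 0;
    }

Without the saved copy, a failed resize would leave return_set pointing at NULL while the original allocation is still live, which is exactly the leak the hunk closes.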
diff --git a/lang/python25/files/patch-modules_arraymodule.c b/lang/python25/files/patch-modules_arraymodule.c
new file mode 100644
index 000000000000..738ee48f1485
--- /dev/null
+++ b/lang/python25/files/patch-modules_arraymodule.c
@@ -0,0 +1,33 @@
+--- Modules/arraymodule.c.orig 2008-02-15 19:11:46.000000000 +0000
++++ Modules/arraymodule.c 2008-08-30 10:39:43.000000000 +0100
+@@ -816,6 +816,7 @@
+ array_do_extend(arrayobject *self, PyObject *bb)
+ {
+ Py_ssize_t size;
++ char *old_item;
+
+ if (!array_Check(bb))
+ return array_iter_extend(self, bb);
+@@ -831,10 +832,11 @@
+ return -1;
+ }
+ size = self->ob_size + b->ob_size;
++ old_item = self->ob_item;
+ PyMem_RESIZE(self->ob_item, char, size*self->ob_descr->itemsize);
+ if (self->ob_item == NULL) {
+- PyObject_Del(self);
+- PyErr_NoMemory();
++ self->ob_item = old_item;
++ PyErr_NoMemory();
+ return -1;
+ }
+ memcpy(self->ob_item + self->ob_size*self->ob_descr->itemsize,
+@@ -886,7 +888,7 @@
+ if (size > PY_SSIZE_T_MAX / n) {
+ return PyErr_NoMemory();
+ }
+- PyMem_Resize(items, char, n * size);
++ PyMem_RESIZE(items, char, n * size);
+ if (items == NULL)
+ return PyErr_NoMemory();
+ p = items;
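
Besides applying the same pointer-saving fix to array_do_extend(), this hunk keeps the `size > PY_SSIZE_T_MAX / n` guard ahead of the repeat allocation so the multiplication can never wrap. A rough sketch of that guard in isolation, with repeat_items() as an invented helper and PTRDIFF_MAX standing in for PY_SSIZE_T_MAX:

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    /* Hypothetical helper: build a buffer holding n copies of item
     * (itemsize bytes each), refusing sizes whose product would overflow. */
    static char *repeat_items(const char *item, size_t itemsize, size_t n)
    {
        if (itemsize != 0 && n > (size_t)PTRDIFF_MAX / itemsize)
            return NULL;                       /* n * itemsize would overflow */

        char *p = malloc(n * itemsize ? n * itemsize : 1);
        if (p == NULL)
            return NULL;
        for (size_t i = 0; i < n; i++)
            memcpy(p + i * itemsize, item, itemsize);
        return p;
    }

    int main(void)
    {
        char *buf = repeat_items("abcd", 4, 10);
        if (buf != NULL) {
            printf("%.8s...\n", buf);          /* prints "abcdabcd..." */
            free(buf);
        }
        /* A request like repeat_items("abcd", 4, SIZE_MAX) is rejected
         * up front instead of wrapping around to a tiny allocation. */
        return 0;
    }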
diff --git a/lang/python25/files/patch-modules_gcmodule.c b/lang/python25/files/patch-modules_gcmodule.c
new file mode 100644
index 000000000000..59253930e9c3
--- /dev/null
+++ b/lang/python25/files/patch-modules_gcmodule.c
@@ -0,0 +1,58 @@
+--- Include/pymem.h.orig 2008-02-14 11:26:18.000000000 +0000
++++ Include/pymem.h 2008-08-30 10:39:43.000000000 +0100
+@@ -67,8 +67,12 @@
+ for malloc(0), which would be treated as an error. Some platforms
+ would return a pointer with no memory behind it, which would break
+ pymalloc. To solve these problems, allocate an extra byte. */
+-#define PyMem_MALLOC(n) malloc((n) ? (n) : 1)
+-#define PyMem_REALLOC(p, n) realloc((p), (n) ? (n) : 1)
++/* Returns NULL to indicate error if a negative size or size larger than
++ Py_ssize_t can represent is supplied. Helps prevents security holes. */
++#define PyMem_MALLOC(n) (((n) < 0 || (n) > PY_SSIZE_T_MAX) ? NULL \
++ : malloc((n) ? (n) : 1))
++#define PyMem_REALLOC(p, n) (((n) < 0 || (n) > PY_SSIZE_T_MAX) ? NULL \
++ : realloc((p), (n) ? (n) : 1))
+ #define PyMem_FREE free
+
+ #endif /* PYMALLOC_DEBUG */
+@@ -77,24 +81,31 @@
+ * Type-oriented memory interface
+ * ==============================
+ *
+- * These are carried along for historical reasons. There's rarely a good
+- * reason to use them anymore (you can just as easily do the multiply and
+- * cast yourself).
++ * Allocate memory for n objects of the given type. Returns a new pointer
++ * or NULL if the request was too large or memory allocation failed. Use
++ * these macros rather than doing the multiplication yourself so that proper
++ * overflow checking is always done.
+ */
+
+ #define PyMem_New(type, n) \
+- ( assert((n) <= PY_SIZE_MAX / sizeof(type)) , \
++ ( ((n) > PY_SSIZE_T_MAX / sizeof(type)) ? NULL : \
+ ( (type *) PyMem_Malloc((n) * sizeof(type)) ) )
+ #define PyMem_NEW(type, n) \
+- ( assert((n) <= PY_SIZE_MAX / sizeof(type)) , \
++ ( ((n) > PY_SSIZE_T_MAX / sizeof(type)) ? NULL : \
+ ( (type *) PyMem_MALLOC((n) * sizeof(type)) ) )
+
++/*
++ * The value of (p) is always clobbered by this macro regardless of success.
++ * The caller MUST check if (p) is NULL afterwards and deal with the memory
++ * error if so. This means the original value of (p) MUST be saved for the
++ * caller's memory error handler to not lose track of it.
++ */
+ #define PyMem_Resize(p, type, n) \
+- ( assert((n) <= PY_SIZE_MAX / sizeof(type)) , \
+- ( (p) = (type *) PyMem_Realloc((p), (n) * sizeof(type)) ) )
++ ( (p) = ((n) > PY_SSIZE_T_MAX / sizeof(type)) ? NULL : \
++ (type *) PyMem_Realloc((p), (n) * sizeof(type)) )
+ #define PyMem_RESIZE(p, type, n) \
+- ( assert((n) <= PY_SIZE_MAX / sizeof(type)) , \
+- ( (p) = (type *) PyMem_REALLOC((p), (n) * sizeof(type)) ) )
++ ( (p) = ((n) > PY_SSIZE_T_MAX / sizeof(type)) ? NULL : \
++ (type *) PyMem_REALLOC((p), (n) * sizeof(type)) )
+
+ /* PyMem{Del,DEL} are left over from ancient days, and shouldn't be used
+ * anymore. They're just confusing aliases for PyMem_{Free,FREE} now.
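
Despite its name, this patch file modifies Include/pymem.h, turning the PyMem_New/PyMem_NEW and PyMem_Resize/PyMem_RESIZE macros from assert-only checks into macros that yield NULL whenever n * sizeof(type) cannot be represented. A small self-contained approximation of that behaviour (MEM_NEW, MEM_RESIZE and MEM_MAX are illustrative names, not CPython's):

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    #define MEM_MAX ((size_t)PTRDIFF_MAX)   /* stand-in for PY_SSIZE_T_MAX */

    #define MEM_NEW(type, n) \
        (((size_t)(n) > MEM_MAX / sizeof(type)) ? NULL \
            : (type *)malloc((size_t)(n) * sizeof(type)))

    /* MEM_RESIZE clobbers its pointer argument just like PyMem_RESIZE,
     * so callers must keep a saved copy (see the almodule hunk above). */
    #define MEM_RESIZE(p, type, n) \
        ((p) = ((size_t)(n) > MEM_MAX / sizeof(type)) ? NULL \
            : (type *)realloc((p), (size_t)(n) * sizeof(type)))

    int main(void)
    {
        double *a = MEM_NEW(double, 1000);      /* ordinary request: succeeds */
        printf("small: %s\n", a ? "ok" : "NULL");

        double *b = MEM_NEW(double, SIZE_MAX);  /* would overflow: returns NULL */
        printf("huge:  %s\n", b ? "ok" : "NULL");

        free(a);
        free(b);                                /* free(NULL) is a no-op */
        return 0;
    }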
diff --git a/lang/python25/files/patch-modules_hashopenssl.c b/lang/python25/files/patch-modules_hashopenssl.c
new file mode 100644
index 000000000000..1f0cd56a244a
--- /dev/null
+++ b/lang/python25/files/patch-modules_hashopenssl.c
@@ -0,0 +1,104 @@
+--- Modules/_hashopenssl.c.orig 2006-05-29 22:04:52.000000000 +0100
++++ Modules/_hashopenssl.c 2008-08-30 10:43:27.000000000 +0100
+@@ -19,6 +19,8 @@
+ /* EVP is the preferred interface to hashing in OpenSSL */
+ #include <openssl/evp.h>
+
++#define MUNCH_SIZE INT_MAX
++
+
+ #ifndef HASH_OBJ_CONSTRUCTOR
+ #define HASH_OBJ_CONSTRUCTOR 0
+@@ -164,9 +166,18 @@
+ if (!PyArg_ParseTuple(args, "s#:update", &cp, &len))
+ return NULL;
+
++ if (len > 0 && len <= MUNCH_SIZE) {
+ EVP_DigestUpdate(&self->ctx, cp, Py_SAFE_DOWNCAST(len, Py_ssize_t,
+ unsigned int));
+-
++ } else {
++ Py_ssize_t offset = 0;
++ while (len) {
++ unsigned int process = len > MUNCH_SIZE ? MUNCH_SIZE : len;
++ EVP_DigestUpdate(&self->ctx, cp + offset, process);
++ len -= process;
++ offset += process;
++ }
++ }
+ Py_INCREF(Py_None);
+ return Py_None;
+ }
+@@ -255,9 +266,20 @@
+ self->name = name_obj;
+ Py_INCREF(self->name);
+
+- if (cp && len)
++ if (cp && len) {
++ if (len > 0 && len <= MUNCH_SIZE) {
+ EVP_DigestUpdate(&self->ctx, cp, Py_SAFE_DOWNCAST(len, Py_ssize_t,
+ unsigned int));
++ } else {
++ Py_ssize_t offset = 0;
++ while (len) {
++ unsigned int process = len > MUNCH_SIZE ? MUNCH_SIZE : len;
++ EVP_DigestUpdate(&self->ctx, cp + offset, process);
++ len -= process;
++ offset += process;
++ }
++ }
++ }
+
+ return 0;
+ }
+@@ -328,7 +350,7 @@
+ static PyObject *
+ EVPnew(PyObject *name_obj,
+ const EVP_MD *digest, const EVP_MD_CTX *initial_ctx,
+- const unsigned char *cp, unsigned int len)
++ const unsigned char *cp, Py_ssize_t len)
+ {
+ EVPobject *self;
+
+@@ -346,8 +368,20 @@
+ EVP_DigestInit(&self->ctx, digest);
+ }
+
+- if (cp && len)
+- EVP_DigestUpdate(&self->ctx, cp, len);
++ if (cp && len) {
++ if (len > 0 && len <= MUNCH_SIZE) {
++ EVP_DigestUpdate(&self->ctx, cp, Py_SAFE_DOWNCAST(len, Py_ssize_t,
++ unsigned int));
++ } else {
++ Py_ssize_t offset = 0;
++ while (len) {
++ unsigned int process = len > MUNCH_SIZE ? MUNCH_SIZE : len;
++ EVP_DigestUpdate(&self->ctx, cp + offset, process);
++ len -= process;
++ offset += process;
++ }
++ }
++ }
+
+ return (PyObject *)self;
+ }
+@@ -384,8 +418,7 @@
+
+ digest = EVP_get_digestbyname(name);
+
+- return EVPnew(name_obj, digest, NULL, cp, Py_SAFE_DOWNCAST(len, Py_ssize_t,
+- unsigned int));
++ return EVPnew(name_obj, digest, NULL, cp, len);
+ }
+
+ /*
+@@ -410,7 +443,7 @@
+ CONST_ ## NAME ## _name_obj, \
+ NULL, \
+ CONST_new_ ## NAME ## _ctx_p, \
+- cp, Py_SAFE_DOWNCAST(len, Py_ssize_t, unsigned int)); \
++ cp, len); \
+ }
+
+ /* a PyMethodDef structure for the constructor */
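
The _hashopenssl change introduces MUNCH_SIZE and feeds oversized inputs to EVP_DigestUpdate() in chunks no larger than INT_MAX, instead of downcasting a Py_ssize_t length to unsigned int and silently truncating it. A standalone sketch of the chunking loop; update_u32() is a made-up stand-in for any hash-update routine that only accepts a 32-bit length (a byte sum keeps the example self-contained):

    #include <limits.h>
    #include <stddef.h>
    #include <stdio.h>

    #define MUNCH_SIZE INT_MAX   /* largest block the 32-bit API can take */

    static unsigned long checksum;

    /* Stand-in for a digest-update routine limited to unsigned int lengths. */
    static void update_u32(const unsigned char *p, unsigned int n)
    {
        for (unsigned int i = 0; i < n; i++)
            checksum += p[i];
    }

    /* Feed an arbitrarily long buffer without ever truncating the length. */
    static void update_large(const unsigned char *cp, size_t len)
    {
        size_t offset = 0;
        while (len) {
            unsigned int process = len > (size_t)MUNCH_SIZE
                                       ? (unsigned int)MUNCH_SIZE
                                       : (unsigned int)len;
            update_u32(cp + offset, process);
            len -= process;
            offset += process;
        }
    }

    int main(void)
    {
        unsigned char buf[] = "ABCDEFGH";
        update_large(buf, sizeof buf - 1);
        printf("checksum = %lu\n", checksum);
        return 0;
    }

The point is that every byte reaches the underlying update call, so a digest over data larger than 4 GB covers the whole input rather than a truncated prefix.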
diff --git a/lang/python25/files/patch-modules_mmapmodule.c b/lang/python25/files/patch-modules_mmapmodule.c
new file mode 100644
index 000000000000..60f3d71ff349
--- /dev/null
+++ b/lang/python25/files/patch-modules_mmapmodule.c
@@ -0,0 +1,11 @@
+--- Modules/mmapmodule.c.orig 2006-08-22 14:57:07.000000000 +0100
++++ Modules/mmapmodule.c 2008-08-30 10:16:13.000000000 +0100
+@@ -223,7 +223,7 @@
+ return(NULL);
+
+ /* silently 'adjust' out-of-range requests */
+- if ((self->pos + num_bytes) > self->size) {
++ if (num_bytes > self->size - self->pos) {
+ num_bytes -= (self->pos+num_bytes) - self->size;
+ }
+ result = Py_BuildValue("s#", self->data+self->pos, num_bytes);
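
The mmap fix rearranges the bounds check so the request size is compared against the space left in the mapping, rather than added to the current position, where the sum can wrap. A small sketch contrasting the two forms (clamp_read() is an invented helper, not the mmap module's code):

    #include <stddef.h>
    #include <stdio.h>

    /* Clamp a read request to what remains of a buffer of `size` bytes
     * starting at `pos`, without computing pos + num_bytes (which can wrap).
     * Assumes the invariant pos <= size holds. */
    static size_t clamp_read(size_t pos, size_t size, size_t num_bytes)
    {
        if (num_bytes > size - pos)
            num_bytes = size - pos;
        return num_bytes;
    }

    int main(void)
    {
        size_t size = 100, pos = 40;

        printf("%zu\n", clamp_read(pos, size, 10));          /* 10: fits */
        printf("%zu\n", clamp_read(pos, size, (size_t)-1));  /* 60: clamped */
        /* With the old `pos + num_bytes > size` form, pos + (size_t)-1
         * wraps around to pos - 1, the check passes, and the caller would
         * try to read far past the end of the mapping. */
        return 0;
    }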
diff --git a/lang/python25/files/patch-modules_selectmodule.c b/lang/python25/files/patch-modules_selectmodule.c
new file mode 100644
index 000000000000..446241f05a97
--- /dev/null
+++ b/lang/python25/files/patch-modules_selectmodule.c
@@ -0,0 +1,16 @@
+--- Modules/selectmodule.c.orig 2006-07-10 02:18:57.000000000 +0100
++++ Modules/selectmodule.c 2008-08-30 10:39:43.000000000 +0100
+@@ -349,10 +349,12 @@
+ {
+ Py_ssize_t i, pos;
+ PyObject *key, *value;
++ struct pollfd *old_ufds = self->ufds;
+
+ self->ufd_len = PyDict_Size(self->dict);
+- PyMem_Resize(self->ufds, struct pollfd, self->ufd_len);
++ PyMem_RESIZE(self->ufds, struct pollfd, self->ufd_len);
+ if (self->ufds == NULL) {
++ self->ufds = old_ufds;
+ PyErr_NoMemory();
+ return 0;
+ }
diff --git a/lang/python25/files/patch-modules_stropmodule.c b/lang/python25/files/patch-modules_stropmodule.c
new file mode 100644
index 000000000000..d7f42ce18871
--- /dev/null
+++ b/lang/python25/files/patch-modules_stropmodule.c
@@ -0,0 +1,31 @@
+--- Modules/stropmodule.c.orig 2008-02-14 11:26:18.000000000 +0000
++++ Modules/stropmodule.c 2008-08-30 10:16:13.000000000 +0100
+@@ -216,6 +216,13 @@
+ return NULL;
+ }
+ slen = PyString_GET_SIZE(item);
++ if (slen > PY_SSIZE_T_MAX - reslen ||
++ seplen > PY_SSIZE_T_MAX - reslen - seplen) {
++ PyErr_SetString(PyExc_OverflowError,
++ "input too long");
++ Py_DECREF(res);
++ return NULL;
++ }
+ while (reslen + slen + seplen >= sz) {
+ if (_PyString_Resize(&res, sz * 2) < 0)
+ return NULL;
+@@ -253,6 +260,14 @@
+ return NULL;
+ }
+ slen = PyString_GET_SIZE(item);
++ if (slen > PY_SSIZE_T_MAX - reslen ||
++ seplen > PY_SSIZE_T_MAX - reslen - seplen) {
++ PyErr_SetString(PyExc_OverflowError,
++ "input too long");
++ Py_DECREF(res);
++ Py_XDECREF(item);
++ return NULL;
++ }
+ while (reslen + slen + seplen >= sz) {
+ if (_PyString_Resize(&res, sz * 2) < 0) {
+ Py_DECREF(item);
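
strop.join() now verifies that appending the next element and separator keeps the running result length representable, raising OverflowError instead of letting reslen wrap. A simplified sketch of that additive guard, with grow_total() as a hypothetical helper and PTRDIFF_MAX in place of PY_SSIZE_T_MAX:

    #include <stdint.h>
    #include <stdio.h>

    #define LEN_MAX PTRDIFF_MAX   /* stand-in for PY_SSIZE_T_MAX */

    /* Add slen plus seplen to a running total, refusing to overflow.
     * Returns 0 on success, -1 if the result would not fit. */
    static int grow_total(ptrdiff_t *reslen, ptrdiff_t slen, ptrdiff_t seplen)
    {
        if (slen > LEN_MAX - *reslen ||
            seplen > LEN_MAX - *reslen - slen)
            return -1;              /* "input too long" */
        *reslen += slen + seplen;
        return 0;
    }

    int main(void)
    {
        ptrdiff_t reslen = 0;
        if (grow_total(&reslen, 100, 2) == 0)
            printf("reslen = %td\n", reslen);          /* 102 */
        if (grow_total(&reslen, LEN_MAX - 50, 2) < 0)
            printf("second join element rejected\n");  /* would overflow */
        return 0;
    }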
diff --git a/lang/python25/files/patch-objects_bufferobject.c b/lang/python25/files/patch-objects_bufferobject.c
new file mode 100644
index 000000000000..16e99568ad9a
--- /dev/null
+++ b/lang/python25/files/patch-objects_bufferobject.c
@@ -0,0 +1,13 @@
+--- Objects/bufferobject.c.orig 2008-02-14 11:26:18.000000000 +0000
++++ Objects/bufferobject.c 2008-08-30 10:16:13.000000000 +0100
+@@ -427,6 +427,10 @@
+ count = 0;
+ if (!get_buf(self, &ptr, &size, ANY_BUFFER))
+ return NULL;
++ if (count > PY_SSIZE_T_MAX / size) {
++ PyErr_SetString(PyExc_MemoryError, "result too large");
++ return NULL;
++ }
+ ob = PyString_FromStringAndSize(NULL, size * count);
+ if ( ob == NULL )
+ return NULL;
diff --git a/lang/python25/files/patch-objects_longobject.c b/lang/python25/files/patch-objects_longobject.c
new file mode 100644
index 000000000000..1221db9fed0a
--- /dev/null
+++ b/lang/python25/files/patch-objects_longobject.c
@@ -0,0 +1,11 @@
+--- Objects/longobject.c.orig 2007-05-07 19:30:48.000000000 +0100
++++ Objects/longobject.c 2008-08-30 10:16:13.000000000 +0100
+@@ -70,6 +70,8 @@
+ PyErr_NoMemory();
+ return NULL;
+ }
++ /* XXX(nnorwitz): This can overflow --
++ PyObject_NEW_VAR / _PyObject_VAR_SIZE need to detect overflow */
+ return PyObject_NEW_VAR(PyLongObject, &PyLong_Type, size);
+ }
+
diff --git a/lang/python25/files/patch-objects_obmalloc.c b/lang/python25/files/patch-objects_obmalloc.c
new file mode 100644
index 000000000000..27050596fbfc
--- /dev/null
+++ b/lang/python25/files/patch-objects_obmalloc.c
@@ -0,0 +1,34 @@
+--- Objects/obmalloc.c.orig 2008-02-14 11:26:18.000000000 +0000
++++ Objects/obmalloc.c 2008-08-30 10:39:43.000000000 +0100
+@@ -727,6 +727,15 @@
+ uint size;
+
+ /*
++ * Limit ourselves to PY_SSIZE_T_MAX bytes to prevent security holes.
++ * Most python internals blindly use a signed Py_ssize_t to track
++ * things without checking for overflows or negatives.
++ * As size_t is unsigned, checking for nbytes < 0 is not required.
++ */
++ if (nbytes > PY_SSIZE_T_MAX)
++ return NULL;
++
++ /*
+ * This implicitly redirects malloc(0).
+ */
+ if ((nbytes - 1) < SMALL_REQUEST_THRESHOLD) {
+@@ -1130,6 +1139,15 @@
+ if (p == NULL)
+ return PyObject_Malloc(nbytes);
+
++ /*
++ * Limit ourselves to PY_SSIZE_T_MAX bytes to prevent security holes.
++ * Most python internals blindly use a signed Py_ssize_t to track
++ * things without checking for overflows or negatives.
++ * As size_t is unsigned, checking for nbytes < 0 is not required.
++ */
++ if (nbytes > PY_SSIZE_T_MAX)
++ return NULL;
++
+ pool = POOL_ADDR(p);
+ if (Py_ADDRESS_IN_RANGE(p, pool)) {
+ /* We're in charge of this block */
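
obmalloc's malloc and realloc paths now refuse requests above PY_SSIZE_T_MAX, since a negative Py_ssize_t that reaches the allocator through a size_t parameter turns into an enormous request. A brief sketch of why the cap matters (checked_malloc() and ALLOC_MAX are illustrative names, not the allocator's real entry points):

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    #define ALLOC_MAX ((size_t)PTRDIFF_MAX)   /* stand-in for PY_SSIZE_T_MAX */

    /* Allocator front end that refuses anything a signed, ssize_t-style
     * length counter could not represent. */
    static void *checked_malloc(size_t nbytes)
    {
        if (nbytes > ALLOC_MAX)
            return NULL;        /* size_t is unsigned, so no < 0 check needed */
        return malloc(nbytes ? nbytes : 1);
    }

    int main(void)
    {
        ptrdiff_t n = -1;               /* a buggy, negative length upstream */
        size_t request = (size_t)n;     /* converts to SIZE_MAX */
        void *p;

        printf("request = %zu\n", request);
        p = checked_malloc(request);
        printf("checked_malloc: %s\n", p ? "allocated" : "rejected");
        free(p);
        return 0;
    }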
diff --git a/lang/python25/files/patch-objects_stringobject.c b/lang/python25/files/patch-objects_stringobject.c
new file mode 100644
index 000000000000..af55c78a4928
--- /dev/null
+++ b/lang/python25/files/patch-objects_stringobject.c
@@ -0,0 +1,49 @@
+--- Objects/stringobject.c.orig 2007-11-07 01:19:49.000000000 +0000
++++ Objects/stringobject.c 2008-08-30 10:16:13.000000000 +0100
+@@ -71,6 +71,11 @@
+ return (PyObject *)op;
+ }
+
++ if (size > PY_SSIZE_T_MAX - sizeof(PyStringObject)) {
++ PyErr_SetString(PyExc_OverflowError, "string is too large");
++ return NULL;
++ }
++
+ /* Inline PyObject_NewVar */
+ op = (PyStringObject *)PyObject_MALLOC(sizeof(PyStringObject) + size);
+ if (op == NULL)
+@@ -106,7 +111,7 @@
+
+ assert(str != NULL);
+ size = strlen(str);
+- if (size > PY_SSIZE_T_MAX) {
++ if (size > PY_SSIZE_T_MAX - sizeof(PyStringObject)) {
+ PyErr_SetString(PyExc_OverflowError,
+ "string is too long for a Python string");
+ return NULL;
+@@ -967,14 +972,24 @@
+ Py_INCREF(a);
+ return (PyObject *)a;
+ }
++ /* Check that string sizes are not negative, to prevent an
++ overflow in cases where we are passed incorrectly-created
++ strings with negative lengths (due to a bug in other code).
++ */
+ size = a->ob_size + b->ob_size;
+- if (size < 0) {
++ if (a->ob_size < 0 || b->ob_size < 0 ||
++ a->ob_size > PY_SSIZE_T_MAX - b->ob_size) {
+ PyErr_SetString(PyExc_OverflowError,
+ "strings are too large to concat");
+ return NULL;
+ }
+
+ /* Inline PyObject_NewVar */
++ if (size > PY_SSIZE_T_MAX - sizeof(PyStringObject)) {
++ PyErr_SetString(PyExc_OverflowError,
++ "strings are too large to concat");
++ return NULL;
++ }
+ op = (PyStringObject *)PyObject_MALLOC(sizeof(PyStringObject) + size);
+ if (op == NULL)
+ return PyErr_NoMemory();
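
The string object hunks bound the payload size before adding sizeof(PyStringObject), and the concat path rejects negative or oversized operand lengths up front. A standalone sketch of the header-plus-payload check with a toy str_obj type (nothing here is the real PyStringObject layout):

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    #define LEN_MAX ((size_t)PTRDIFF_MAX)   /* stand-in for PY_SSIZE_T_MAX */

    /* A toy variable-length object: a header followed by `size` bytes. */
    struct str_obj {
        ptrdiff_t size;
        char data[];
    };

    /* Allocate header + payload, rejecting sizes where adding the header
     * itself would overflow. */
    static struct str_obj *str_new(ptrdiff_t size)
    {
        if (size < 0 || (size_t)size > LEN_MAX - sizeof(struct str_obj))
            return NULL;                            /* "string is too large" */
        struct str_obj *op = malloc(sizeof(struct str_obj) + (size_t)size);
        if (op != NULL)
            op->size = size;
        return op;
    }

    int main(void)
    {
        struct str_obj *s = str_new(11);
        if (s != NULL) {
            memcpy(s->data, "hello world", 11);
            printf("len = %td\n", s->size);
            free(s);
        }
        printf("huge: %s\n", str_new(PTRDIFF_MAX) ? "ok" : "rejected");
        return 0;
    }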
diff --git a/lang/python25/files/patch-objects_tupleobject.c b/lang/python25/files/patch-objects_tupleobject.c
new file mode 100644
index 000000000000..eb133b6e002c
--- /dev/null
+++ b/lang/python25/files/patch-objects_tupleobject.c
@@ -0,0 +1,17 @@
+--- Objects/tupleobject.c.orig 2006-08-12 18:03:09.000000000 +0100
++++ Objects/tupleobject.c 2008-08-30 10:16:13.000000000 +0100
+@@ -60,11 +60,12 @@
+ Py_ssize_t nbytes = size * sizeof(PyObject *);
+ /* Check for overflow */
+ if (nbytes / sizeof(PyObject *) != (size_t)size ||
+- (nbytes += sizeof(PyTupleObject) - sizeof(PyObject *))
+- <= 0)
++ (nbytes > PY_SSIZE_T_MAX - sizeof(PyTupleObject) - sizeof(PyObject *)))
+ {
+ return PyErr_NoMemory();
+ }
++ nbytes += sizeof(PyTupleObject) - sizeof(PyObject *);
++
+ op = PyObject_GC_NewVar(PyTupleObject, &PyTuple_Type, size);
+ if (op == NULL)
+ return NULL;
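
The tuple allocator keeps the existing divide-back test for the pointer-array multiplication but now also bounds the header addition explicitly, replacing the old `nbytes <= 0` test that relied on signed overflow. A minimal sketch of both checks together (tuple_like_new() and struct header are invented for illustration):

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    #define LEN_MAX ((size_t)PTRDIFF_MAX)   /* stand-in for PY_SSIZE_T_MAX */

    struct header { size_t nitems; };

    /* Allocate room for a header plus `size` pointers, detecting overflow
     * by dividing the product back and by bounding the later addition. */
    static void *tuple_like_new(size_t size)
    {
        size_t nbytes = size * sizeof(void *);
        if ((size != 0 && nbytes / sizeof(void *) != size) ||
            nbytes > LEN_MAX - sizeof(struct header))
            return NULL;                 /* multiplication or addition overflowed */
        nbytes += sizeof(struct header);
        return malloc(nbytes);
    }

    int main(void)
    {
        void *t = tuple_like_new(8);
        printf("small: %s\n", t ? "ok" : "rejected");
        free(t);
        printf("huge:  %s\n", tuple_like_new(SIZE_MAX / 2) ? "ok" : "rejected");
        return 0;
    }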
diff --git a/lang/python25/files/patch-objects_unicodeobject.c b/lang/python25/files/patch-objects_unicodeobject.c
new file mode 100644
index 000000000000..85e88caae0e3
--- /dev/null
+++ b/lang/python25/files/patch-objects_unicodeobject.c
@@ -0,0 +1,115 @@
+--- Objects/unicodeobject.c.orig 2007-11-02 22:46:38.000000000 +0000
++++ Objects/unicodeobject.c 2008-08-30 10:16:13.000000000 +0100
+@@ -239,6 +239,11 @@
+ return unicode_empty;
+ }
+
++ /* Ensure we won't overflow the size. */
++ if (length > ((PY_SSIZE_T_MAX / sizeof(Py_UNICODE)) - 1)) {
++ return (PyUnicodeObject *)PyErr_NoMemory();
++ }
++
+ /* Unicode freelist & memory allocation */
+ if (unicode_freelist) {
+ unicode = unicode_freelist;
+@@ -1091,6 +1096,9 @@
+ char * out;
+ char * start;
+
++ if (cbAllocated / 5 != size)
++ return PyErr_NoMemory();
++
+ if (size == 0)
+ return PyString_FromStringAndSize(NULL, 0);
+
+@@ -1689,8 +1697,9 @@
+ {
+ PyObject *v;
+ unsigned char *p;
++ Py_ssize_t nsize, bytesize;
+ #ifdef Py_UNICODE_WIDE
+- int i, pairs;
++ Py_ssize_t i, pairs;
+ #else
+ const int pairs = 0;
+ #endif
+@@ -1713,8 +1722,15 @@
+ if (s[i] >= 0x10000)
+ pairs++;
+ #endif
+- v = PyString_FromStringAndSize(NULL,
+- 2 * (size + pairs + (byteorder == 0)));
++ /* 2 * (size + pairs + (byteorder == 0)) */
++ if (size > PY_SSIZE_T_MAX ||
++ size > PY_SSIZE_T_MAX - pairs - (byteorder == 0))
++ return PyErr_NoMemory();
++ nsize = (size + pairs + (byteorder == 0));
++ bytesize = nsize * 2;
++ if (bytesize / 2 != nsize)
++ return PyErr_NoMemory();
++ v = PyString_FromStringAndSize(NULL, bytesize);
+ if (v == NULL)
+ return NULL;
+
+@@ -2042,6 +2058,11 @@
+ char *p;
+
+ static const char *hexdigit = "0123456789abcdef";
++#ifdef Py_UNICODE_WIDE
++ const Py_ssize_t expandsize = 10;
++#else
++ const Py_ssize_t expandsize = 6;
++#endif
+
+ /* Initial allocation is based on the longest-possible unichr
+ escape.
+@@ -2057,13 +2078,12 @@
+ escape.
+ */
+
++ if (size > (PY_SSIZE_T_MAX - 2 - 1) / expandsize)
++ return PyErr_NoMemory();
++
+ repr = PyString_FromStringAndSize(NULL,
+ 2
+-#ifdef Py_UNICODE_WIDE
+- + 10*size
+-#else
+- + 6*size
+-#endif
++ + expandsize*size
+ + 1);
+ if (repr == NULL)
+ return NULL;
+@@ -2304,12 +2324,16 @@
+ char *q;
+
+ static const char *hexdigit = "0123456789abcdef";
+-
+ #ifdef Py_UNICODE_WIDE
+- repr = PyString_FromStringAndSize(NULL, 10 * size);
++ const Py_ssize_t expandsize = 10;
+ #else
+- repr = PyString_FromStringAndSize(NULL, 6 * size);
++ const Py_ssize_t expandsize = 6;
+ #endif
++
++ if (size > PY_SSIZE_T_MAX / expandsize)
++ return PyErr_NoMemory();
++
++ repr = PyString_FromStringAndSize(NULL, expandsize * size);
+ if (repr == NULL)
+ return NULL;
+ if (size == 0)
+@@ -4719,6 +4743,11 @@
+ return self;
+ }
+
++ if (left > PY_SSIZE_T_MAX - self->length ||
++ right > PY_SSIZE_T_MAX - (left + self->length)) {
++ PyErr_SetString(PyExc_OverflowError, "padded string is too long");
++ return NULL;
++ }
+ u = _PyUnicode_New(left + self->length + right);
+ if (u) {
+ if (left)
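
Several of the unicode hunks share one idea: when every input character can expand to a fixed number of output bytes (10 for \U escapes on wide builds, 6 otherwise), bound the character count against PY_SSIZE_T_MAX / expandsize before allocating. A rough sketch of that worst-case sizing (alloc_escape_buffer() is a hypothetical helper, not the encoder itself):

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    #define LEN_MAX ((size_t)PTRDIFF_MAX)   /* stand-in for PY_SSIZE_T_MAX */

    /* Worst-case output buffer for an escaped representation where every
     * input character may expand to expandsize bytes, plus two quotes and
     * a terminator.  Assumes expandsize > 0; returns NULL on overflow. */
    static char *alloc_escape_buffer(size_t nchars, size_t expandsize)
    {
        if (nchars > (LEN_MAX - 2 - 1) / expandsize)
            return NULL;                  /* nchars * expandsize would overflow */
        return malloc(2 + nchars * expandsize + 1);
    }

    int main(void)
    {
        const size_t expandsize = 10;     /* wide build: \UXXXXXXXX */
        char *buf = alloc_escape_buffer(1000, expandsize);
        printf("1000 chars: %s\n", buf ? "ok" : "rejected");
        free(buf);
        printf("huge repr:  %s\n",
               alloc_escape_buffer(SIZE_MAX / 4, expandsize) ? "ok" : "rejected");
        return 0;
    }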
diff --git a/lang/python25/files/patch-python_mysnprintf.c b/lang/python25/files/patch-python_mysnprintf.c
new file mode 100644
index 000000000000..276dd21a1b31
--- /dev/null
+++ b/lang/python25/files/patch-python_mysnprintf.c
@@ -0,0 +1,55 @@
+--- Python/mysnprintf.c.orig 2001-12-21 16:32:15.000000000 +0000
++++ Python/mysnprintf.c 2008-08-30 10:46:31.000000000 +0100
+@@ -54,18 +54,28 @@
+ PyOS_vsnprintf(char *str, size_t size, const char *format, va_list va)
+ {
+ int len; /* # bytes written, excluding \0 */
+-#ifndef HAVE_SNPRINTF
++#ifdef HAVE_SNPRINTF
++#define _PyOS_vsnprintf_EXTRA_SPACE 1
++#else
++#define _PyOS_vsnprintf_EXTRA_SPACE 512
+ char *buffer;
+ #endif
+ assert(str != NULL);
+ assert(size > 0);
+ assert(format != NULL);
++ /* We take a size_t as input but return an int. Sanity check
++ * our input so that it won't cause an overflow in the
++ * vsnprintf return value or the buffer malloc size. */
++ if (size > INT_MAX - _PyOS_vsnprintf_EXTRA_SPACE) {
++ len = -666;
++ goto Done;
++ }
+
+ #ifdef HAVE_SNPRINTF
+ len = vsnprintf(str, size, format, va);
+ #else
+ /* Emulate it. */
+- buffer = PyMem_MALLOC(size + 512);
++ buffer = PyMem_MALLOC(size + _PyOS_vsnprintf_EXTRA_SPACE);
+ if (buffer == NULL) {
+ len = -666;
+ goto Done;
+@@ -75,7 +85,7 @@
+ if (len < 0)
+ /* ignore the error */;
+
+- else if ((size_t)len >= size + 512)
++ else if ((size_t)len >= size + _PyOS_vsnprintf_EXTRA_SPACE)
+ Py_FatalError("Buffer overflow in PyOS_snprintf/PyOS_vsnprintf");
+
+ else {
+@@ -86,8 +96,10 @@
+ str[to_copy] = '\0';
+ }
+ PyMem_FREE(buffer);
+-Done:
+ #endif
+- str[size-1] = '\0';
++Done:
++ if (size > 0)
++ str[size-1] = '\0';
+ return len;
++#undef _PyOS_vsnprintf_EXTRA_SPACE
+ }
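
PyOS_vsnprintf() accepts a size_t buffer size but returns an int, and the emulation path allocates size plus 512 extra bytes, so the patch rejects sizes above INT_MAX minus that slack and only writes the trailing NUL when size is non-zero. A standalone wrapper with the same bound, built on the C99 vsnprintf() (safe_vsnprintf() and EXTRA_SPACE are illustrative names, and -666 simply mirrors the patch's error convention):

    #include <limits.h>
    #include <stdarg.h>
    #include <stdio.h>

    #define EXTRA_SPACE 1   /* slack kept below INT_MAX, as in the patch */

    /* vsnprintf wrapper that keeps `size` representable as an int and
     * NUL-terminates any non-empty buffer the caller provides. */
    static int safe_vsnprintf(char *str, size_t size, const char *fmt, ...)
    {
        int len;
        va_list va;

        if (size > (size_t)(INT_MAX - EXTRA_SPACE))
            len = -666;                 /* reject: result would not fit an int */
        else {
            va_start(va, fmt);
            len = vsnprintf(str, size, fmt, va);
            va_end(va);
        }
        if (size > 0)
            str[size - 1] = '\0';
        return len;
    }

    int main(void)
    {
        char buf[16];
        int n = safe_vsnprintf(buf, sizeof buf, "%s %d", "answer:", 42);
        printf("%s (len=%d)\n", buf, n);
        return 0;
    }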