git.ipfire.org Git - thirdparty/Python/cpython.git/commitdiff
Closes #15910: MD5 and SHA1 crash when "updated" with strings bigger than 2**32 bytes
author     Jesus Cea <jcea@jcea.es>
           Mon, 10 Sep 2012 19:39:07 +0000 (21:39 +0200)
committer  Jesus Cea <jcea@jcea.es>
           Mon, 10 Sep 2012 19:39:07 +0000 (21:39 +0200)
Lib/test/test_hashlib.py
Misc/NEWS
Modules/md5module.c
Modules/shamodule.c
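
The root cause is visible in the removed lines below: md5_update() and SHA_update() passed the Py_buffer length straight through Py_SAFE_DOWNCAST(view.len, Py_ssize_t, unsigned int), which truncates any length of 2**32 bytes or more on 64-bit builds (and fails the downcast assertion on debug builds). A minimal reproduction sketch, assuming a 64-bit build of this branch with more than 4 GB of free memory; the expected digest is the constant used by the new test:

    import hashlib

    # Before this fix, update() hashed only len(data) % 2**32 bytes of the
    # buffer (or aborted on the downcast assertion in debug builds).
    data = 'A' * (2**32 + 5)
    h = hashlib.md5()
    h.update(data)
    print h.hexdigest()   # expected: c9af2dff37468ce5dfee8f2cfc0a9c6d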

index 114b09bfbbcb5295ac51d06e16df1e44f0b6c495..e6365c5c5f3635791cc2f7ab482faafe68592568 100644 (file)
--- a/Lib/test/test_hashlib.py
+++ b/Lib/test/test_hashlib.py
@@ -167,6 +167,21 @@ class HashLibTestCase(unittest.TestCase):
                     % (name, hash_object_constructor,
                        computed, len(data), digest))
 
+    def check_update(self, name, data, digest):
+        constructors = self.constructors_to_test[name]
+        # 2 is for hashlib.name(...) and hashlib.new(name, ...)
+        self.assertGreaterEqual(len(constructors), 2)
+        for hash_object_constructor in constructors:
+            h = hash_object_constructor()
+            h.update(data)
+            computed = h.hexdigest()
+            self.assertEqual(
+                    computed, digest,
+                    "Hash algorithm %s using %s when updated returned hexdigest"
+                    " %r for %d byte input data that should have hashed to %r."
+                    % (name, hash_object_constructor,
+                       computed, len(data), digest))
+
     def check_unicode(self, algorithm_name):
         # Unicode objects are not allowed as input.
         expected = hashlib.new(algorithm_name, str(u'spam')).hexdigest()
@@ -200,6 +215,15 @@ class HashLibTestCase(unittest.TestCase):
             except OverflowError:
                 pass # 32-bit arch
 
+    @precisionbigmemtest(size=_4G + 5, memuse=1)
+    def test_case_md5_huge_update(self, size):
+        if size == _4G + 5:
+            try:
+                self.check_update('md5', 'A'*size,
+                        'c9af2dff37468ce5dfee8f2cfc0a9c6d')
+            except OverflowError:
+                pass # 32-bit arch
+
     @precisionbigmemtest(size=_4G - 1, memuse=1)
     def test_case_md5_uintmax(self, size):
         if size == _4G - 1:
@@ -237,6 +261,15 @@ class HashLibTestCase(unittest.TestCase):
             except OverflowError:
                 pass # 32-bit arch
 
+    @precisionbigmemtest(size=_4G + 5, memuse=1)
+    def test_case_sha1_huge_update(self, size):
+        if size == _4G + 5:
+            try:
+                self.check_update('sha1', 'A'*size,
+                        '87d745c50e6b2879ffa0fb2c930e9fbfe0dc9a5b')
+            except OverflowError:
+                pass # 32-bit arch
+
     # use the examples from Federal Information Processing Standards
     # Publication 180-2, Secure Hash Standard,  2002 August 1
     # http://csrc.nist.gov/publications/fips/fips180-2/fips180-2.pdf
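
The new tests rely on the streaming property of these digests: feeding the input in pieces must produce the same result as one oversized update() call, which is exactly what the C-level chunking below restores. A small-scale sketch of that equivalence, runnable without the big-memory setup:

    import hashlib

    # Hashing in chunks must match hashing in one shot; the fixed C code
    # splits oversized buffers into INT_MAX-sized pieces on this basis.
    one_shot = hashlib.sha1('A' * 1000).hexdigest()
    h = hashlib.sha1()
    for _ in xrange(10):
        h.update('A' * 100)
    assert h.hexdigest() == one_shot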
index 33a57f409a91d01495501c618d1a3145f68a00ca..817a3526116de2b8398ea49cb30983a77bec6db5 100644 (file)
--- a/Misc/NEWS
+++ b/Misc/NEWS
@@ -247,6 +247,9 @@ Library
 - Issue #15908: Fix misbehaviour of the sha1 module when called on data
   larger than 2**32 bytes.
 
+- Issue #15910: Fix misbehaviour of _md5 and sha1 modules when "updating"
+  on data larger than 2**32 bytes.
+
 - Issue #14875: Use float('inf') instead of float('1e66666') in the json module.
 
 - Issue #14572: Prevent build failures with pre-3.5.0 versions of
index 3461623086c0b495b3e06986e68dd38bc7972fd2..103da1497dac9607d593801a6980d8cdc0645e6a 100644 (file)
--- a/Modules/md5module.c
+++ b/Modules/md5module.c
@@ -51,12 +51,25 @@ static PyObject *
 md5_update(md5object *self, PyObject *args)
 {
     Py_buffer view;
+    Py_ssize_t n;
+    unsigned char *buf;
 
     if (!PyArg_ParseTuple(args, "s*:update", &view))
         return NULL;
 
-    md5_append(&self->md5, (unsigned char*)view.buf,
-               Py_SAFE_DOWNCAST(view.len, Py_ssize_t, unsigned int));
+    n = view.len;
+    buf = (unsigned char *) view.buf;
+    while (n > 0) {
+        Py_ssize_t nbytes;
+        if (n > INT_MAX)
+            nbytes = INT_MAX;
+        else
+            nbytes = n;
+        md5_append(&self->md5, buf,
+                   Py_SAFE_DOWNCAST(nbytes, Py_ssize_t, unsigned int));
+        buf += nbytes;
+        n -= nbytes;
+    }
 
     PyBuffer_Release(&view);
     Py_RETURN_NONE;
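
For illustration, a Python-level sketch of the strategy the loop above now applies: hand the buffer to the hash object in chunks of at most INT_MAX bytes, so every length passed down fits in an unsigned int. The safe_update helper is hypothetical, not part of any module:

    import hashlib

    INT_MAX = 2**31 - 1  # matches C INT_MAX on common platforms

    def safe_update(h, buf):
        # Hypothetical helper mirroring the C loop above: each update()
        # call gets at most INT_MAX bytes, so the downcast is lossless.
        for start in xrange(0, len(buf), INT_MAX):
            h.update(buf[start:start + INT_MAX])

    # usage: safe_update(hashlib.md5(), 'A' * (2**32 + 5))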
index df7344153f94f9a892a7df3e9e64e46369c337dc..656208da9f50da9a7ae5a914eca6808d973b809c 100644 (file)
--- a/Modules/shamodule.c
+++ b/Modules/shamodule.c
@@ -429,12 +429,25 @@ static PyObject *
 SHA_update(SHAobject *self, PyObject *args)
 {
     Py_buffer view;
+    Py_ssize_t n;
+    unsigned char *buf;
 
     if (!PyArg_ParseTuple(args, "s*:update", &view))
         return NULL;
 
-    sha_update(self, (unsigned char*)view.buf,
-               Py_SAFE_DOWNCAST(view.len, Py_ssize_t, unsigned int));
+    n = view.len;
+    buf = (unsigned char *) view.buf;
+    while (n > 0) {
+        Py_ssize_t nbytes;
+        if (n > INT_MAX)
+            nbytes = INT_MAX;
+        else
+            nbytes = n;
+        sha_update(self, buf,
+                   Py_SAFE_DOWNCAST(nbytes, Py_ssize_t, unsigned int));
+        buf += nbytes;
+        n -= nbytes;
+    }
 
     PyBuffer_Release(&view);
     Py_RETURN_NONE;
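
The shamodule.c change applies the same chunking pattern as the md5 one. Assuming the same 64-bit, big-memory setup as the sketch above, the sha1 path can be spot-checked against the constant from test_case_sha1_huge_update:

    import hashlib

    # Expected value taken from the new test above.
    print hashlib.sha1('A' * (2**32 + 5)).hexdigest()
    # 87d745c50e6b2879ffa0fb2c930e9fbfe0dc9a5b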