- Handle NULL returned by allocation functions.
- Handle NULL returned by copy functions.
- Explicitly discard ((void)-cast) return codes from calls that cannot fail in practice.
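
All three fixes share one shape. A minimal sketch of the pattern, with
hypothetical demo_state/Demo_* names standing in for the real HACL* types and
functions (e.g. Hacl_Hash_SHA2_malloc_256 / Hacl_Hash_SHA2_update_256):

    #include <Python.h>
    #include <stdint.h>

    /* Placeholders for a HACL* streaming state and its API. */
    typedef struct demo_state_s demo_state;
    extern demo_state *Demo_malloc(void);
    extern uint8_t Demo_update(demo_state *st, uint8_t *buf, uint32_t len);

    static int
    demo_init(demo_state **out, uint8_t *data, uint32_t len)
    {
        *out = Demo_malloc();           /* allocation can return NULL... */
        if (*out == NULL) {
            (void)PyErr_NoMemory();     /* ...so raise MemoryError and fail */
            return -1;
        }
        /* The update error code can only report a length overflow, which is
         * unreachable for a single sub-4GiB chunk: discard it explicitly. */
        (void)Demo_update(*out, data, len);
        return 0;
    }
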
* 64 bits so we loop in <4gig chunks when needed. */
#if PY_SSIZE_T_MAX > UINT32_MAX
-#define HACL_UPDATE_LOOP(update,state,buf,len) \
- while (len > UINT32_MAX) { \
- update(state, buf, UINT32_MAX); \
- len -= UINT32_MAX; \
- buf += UINT32_MAX; \
- }
+# define HACL_UPDATE_LOOP(UPDATE_FUNC, STATE, BUF, LEN) \
+ do { \
+ while (LEN > UINT32_MAX) { \
+ (void)UPDATE_FUNC(STATE, BUF, UINT32_MAX); \
+ LEN -= UINT32_MAX; \
+ BUF += UINT32_MAX; \
+ } \
+ } while (0)
#else
-#define HACL_UPDATE_LOOP(update,state,buf,len)
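+/* On 32-bit platforms Py_ssize_t already fits in uint32_t, so no chunking is needed. */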
+# define HACL_UPDATE_LOOP(...)
#endif
-#define HACL_UPDATE(update,state,buf,len) do { \
- /* Note: we explicitly ignore the error code on the basis that it would take >
- * 1 billion years to overflow the maximum admissible length for SHA2-256
- * (namely, 2^61-1 bytes). */ \
- HACL_UPDATE_LOOP(update,state,buf,len) \
- /* Cast to uint32_t is safe: len <= UINT32_MAX at this point. */ \
- update(state, buf, (uint32_t) len); \
-} while (0)
+/*
+ * Note: we explicitly ignore the error code on the basis that it would take
+ * more than 1 billion years to overflow the maximum admissible length for
+ * blake2b/2s (2^64 - 1).
+ */
+#define HACL_UPDATE(UPDATE_FUNC, STATE, BUF, LEN) \
+ do { \
+ HACL_UPDATE_LOOP(UPDATE_FUNC, STATE, BUF, LEN); \
+ /* cast to uint32_t is now safe */ \
+ (void)UPDATE_FUNC(STATE, BUF, (uint32_t)LEN); \
+ } while (0)
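
For reference, on a 64-bit build HACL_UPDATE(f, state, buf, len) expands to
(reindented):

    do {
        do {
            while (len > UINT32_MAX) {
                (void)f(state, buf, UINT32_MAX);
                len -= UINT32_MAX;
                buf += UINT32_MAX;
            }
        } while (0);
        /* cast to uint32_t is now safe */
        (void)f(state, buf, (uint32_t)len);
    } while (0);

The input is consumed in UINT32_MAX-byte chunks; the remainder, now at most
UINT32_MAX bytes, goes through one final call.
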
-static void update(Blake2Object *self, uint8_t *buf, Py_ssize_t len) {
+static void
+update(Blake2Object *self, uint8_t *buf, Py_ssize_t len)
+{
switch (self->impl) {
// These need to be ifdef'd out otherwise it's an unresolved symbol at
// link-time.
switch (self->impl) {
#if HACL_CAN_COMPILE_SIMD256
- case Blake2b_256:
+ case Blake2b_256: {
self->blake2b_256_state = Hacl_Hash_Blake2b_Simd256_malloc_with_params_and_key(&params, last_node, key->buf);
+ if (self->blake2b_256_state == NULL) {
+ (void)PyErr_NoMemory();
+ goto error;
+ }
break;
+ }
#endif
#if HACL_CAN_COMPILE_SIMD128
- case Blake2s_128:
+ case Blake2s_128: {
self->blake2s_128_state = Hacl_Hash_Blake2s_Simd128_malloc_with_params_and_key(&params, last_node, key->buf);
+ if (self->blake2s_128_state == NULL) {
+ (void)PyErr_NoMemory();
+ goto error;
+ }
break;
+ }
#endif
- case Blake2b:
+ case Blake2b: {
self->blake2b_state = Hacl_Hash_Blake2b_malloc_with_params_and_key(&params, last_node, key->buf);
+ if (self->blake2b_state == NULL) {
+ (void)PyErr_NoMemory();
+ goto error;
+ }
break;
- case Blake2s:
+ }
+ case Blake2s: {
self->blake2s_state = Hacl_Hash_Blake2s_malloc_with_params_and_key(&params, last_node, key->buf);
+ if (self->blake2s_state == NULL) {
+ (void)PyErr_NoMemory();
+ goto error;
+ }
break;
+ }
default:
Py_UNREACHABLE();
}
Py_BEGIN_ALLOW_THREADS
update(self, buf.buf, buf.len);
Py_END_ALLOW_THREADS
- } else {
+ }
+ else {
update(self, buf.buf, buf.len);
}
PyBuffer_Release(&buf);
return py_blake2b_or_s_new(type, data, digest_size, key, salt, person, fanout, depth, leaf_size, node_offset, node_depth, inner_size, last_node, usedforsecurity);
}
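
Throughout these constructors, the initial update only drops the GIL when the
input clears the existing HASHLIB_GIL_MINSIZE threshold. A sketch of the shape
each call site takes, with the rationale spelled out as comments:

    if (buf.len >= HASHLIB_GIL_MINSIZE) {
        /* Large input: release the GIL so other threads can run while we
         * hash. No per-object lock is needed yet, since no other thread can
         * see the object until the constructor returns. */
        Py_BEGIN_ALLOW_THREADS
        update(self, buf.buf, buf.len);
        Py_END_ALLOW_THREADS
    }
    else {
        /* Small input: a GIL round-trip would cost more than the hash. */
        update(self, buf.buf, buf.len);
    }
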
-/*[clinic input]
-_blake2.blake2b.copy
-
-Return a copy of the hash object.
-[clinic start generated code]*/
-
-static PyObject *
-_blake2_blake2b_copy_impl(Blake2Object *self)
-/*[clinic end generated code: output=622d1c56b91c50d8 input=e383c2d199fd8a2e]*/
+static int
+blake2_blake2b_copy_locked(Blake2Object *self, Blake2Object *cpy)
{
- Blake2Object *cpy;
-
- if ((cpy = new_Blake2Object(Py_TYPE(self))) == NULL)
- return NULL;
-
- ENTER_HASHLIB(self);
+ assert(cpy != NULL);
switch (self->impl) {
#if HACL_CAN_COMPILE_SIMD256
- case Blake2b_256:
+ case Blake2b_256: {
cpy->blake2b_256_state = Hacl_Hash_Blake2b_Simd256_copy(self->blake2b_256_state);
+ if (cpy->blake2b_256_state == NULL) {
+ goto error;
+ }
break;
+ }
#endif
#if HACL_CAN_COMPILE_SIMD128
- case Blake2s_128:
+ case Blake2s_128: {
cpy->blake2s_128_state = Hacl_Hash_Blake2s_Simd128_copy(self->blake2s_128_state);
+ if (cpy->blake2s_128_state == NULL) {
+ goto error;
+ }
break;
+ }
#endif
- case Blake2b:
+ case Blake2b: {
cpy->blake2b_state = Hacl_Hash_Blake2b_copy(self->blake2b_state);
+ if (cpy->blake2b_state == NULL) {
+ goto error;
+ }
break;
- case Blake2s:
+ }
+ case Blake2s: {
cpy->blake2s_state = Hacl_Hash_Blake2s_copy(self->blake2s_state);
+ if (cpy->blake2s_state == NULL) {
+ goto error;
+ }
break;
+ }
default:
Py_UNREACHABLE();
}
cpy->impl = self->impl;
+ return 0;
+
+error:
+ (void)PyErr_NoMemory();
+ return -1;
+}
+
+/*[clinic input]
+_blake2.blake2b.copy
+
+Return a copy of the hash object.
+[clinic start generated code]*/
+
+static PyObject *
+_blake2_blake2b_copy_impl(Blake2Object *self)
+/*[clinic end generated code: output=622d1c56b91c50d8 input=e383c2d199fd8a2e]*/
+{
+ int rc;
+ Blake2Object *cpy;
+
+ if ((cpy = new_Blake2Object(Py_TYPE(self))) == NULL) {
+ return NULL;
+ }
+
+ ENTER_HASHLIB(self);
+ rc = blake2_blake2b_copy_locked(self, cpy);
LEAVE_HASHLIB(self);
+ if (rc < 0) {
+ Py_DECREF(cpy);
+ return NULL;
+ }
return (PyObject *)cpy;
}
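
The same wrapper/_locked split generalizes to the other copy methods. A sketch
with hypothetical ThingObject/Thing_* names: the helper does only fallible
work and returns a status, so the ENTER_HASHLIB/LEAVE_HASHLIB pair always
balances and all refcount cleanup stays in the wrapper:

    static int
    thing_copy_locked(ThingObject *self, ThingObject *cpy)
    {
        cpy->state = Thing_copy_state(self->state);  /* may return NULL */
        if (cpy->state == NULL) {
            (void)PyErr_NoMemory();
            return -1;
        }
        return 0;
    }

    static PyObject *
    thing_copy(ThingObject *self)
    {
        int rc;
        ThingObject *cpy;

        if ((cpy = new_thing_object(Py_TYPE(self))) == NULL) {
            return NULL;
        }
        ENTER_HASHLIB(self);
        rc = thing_copy_locked(self, cpy);
        LEAVE_HASHLIB(self);
        if (rc < 0) {
            Py_DECREF(cpy);  /* DECREF happens outside the lock */
            return NULL;
        }
        return (PyObject *)cpy;
    }
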
MD5State *st = PyType_GetModuleState(cls);
MD5object *newobj;
- if ((newobj = newMD5object(st))==NULL)
+ if ((newobj = newMD5object(st)) == NULL) {
return NULL;
+ }
ENTER_HASHLIB(self);
newobj->hash_state = Hacl_Hash_MD5_copy(self->hash_state);
LEAVE_HASHLIB(self);
+ if (newobj->hash_state == NULL) {
+ Py_DECREF(newobj);
+ return PyErr_NoMemory();
+ }
return (PyObject *)newobj;
}
return PyUnicode_FromStringAndSize(digest_hex, sizeof(digest_hex));
}
-static void update(Hacl_Hash_MD5_state_t *state, uint8_t *buf, Py_ssize_t len) {
+static void
+update(Hacl_Hash_MD5_state_t *state, uint8_t *buf, Py_ssize_t len)
+{
+ /*
+ * Note: we explicitly ignore the error code on the basis that it would
+ * take more than 1 billion years to overflow the maximum admissible length
+ * for MD5 (2^61 - 1).
+ */
#if PY_SSIZE_T_MAX > UINT32_MAX
- while (len > UINT32_MAX) {
- Hacl_Hash_MD5_update(state, buf, UINT32_MAX);
- len -= UINT32_MAX;
- buf += UINT32_MAX;
- }
+ while (len > UINT32_MAX) {
+ (void)Hacl_Hash_MD5_update(state, buf, UINT32_MAX);
+ len -= UINT32_MAX;
+ buf += UINT32_MAX;
+ }
#endif
- Hacl_Hash_MD5_update(state, buf, (uint32_t) len);
+ /* cast to uint32_t is now safe */
+ (void)Hacl_Hash_MD5_update(state, buf, (uint32_t)len);
}
/*[clinic input]
MD5object *new;
Py_buffer buf;
- if (string)
+ if (string) {
GET_BUFFER_VIEW_OR_ERROUT(string, &buf);
+ }
MD5State *st = md5_get_state(module);
if ((new = newMD5object(st)) == NULL) {
- if (string)
+ if (string) {
PyBuffer_Release(&buf);
+ }
return NULL;
}
new->hash_state = Hacl_Hash_MD5_malloc();
-
- if (PyErr_Occurred()) {
+ if (new->hash_state == NULL) {
Py_DECREF(new);
- if (string)
+ if (string) {
PyBuffer_Release(&buf);
- return NULL;
+ }
+ return PyErr_NoMemory();
}
+
if (string) {
if (buf.len >= HASHLIB_GIL_MINSIZE) {
/* We do not initialize self->lock here as this is the constructor
Py_BEGIN_ALLOW_THREADS
update(new->hash_state, buf.buf, buf.len);
Py_END_ALLOW_THREADS
- } else {
+ }
+ else {
update(new->hash_state, buf.buf, buf.len);
}
PyBuffer_Release(&buf);
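
One invariant these constructors maintain: after GET_BUFFER_VIEW_OR_ERROUT
succeeds, every return path must release the view exactly once. Schematically
(names as in the md5 hunk above):

    if (string) {
        GET_BUFFER_VIEW_OR_ERROUT(string, &buf);  /* acquires the view */
    }
    if ((new = newMD5object(st)) == NULL) {
        if (string) {
            PyBuffer_Release(&buf);               /* error paths release */
        }
        return NULL;
    }
    /* ... hash the data ... */
    if (string) {
        PyBuffer_Release(&buf);                   /* success path releases */
    }
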
SHA1_dealloc(PyObject *op)
{
SHA1object *ptr = _SHA1object_CAST(op);
- Hacl_Hash_SHA1_free(ptr->hash_state);
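+ /* hash_state may be NULL if the constructor failed before allocating it. */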
+ if (ptr->hash_state != NULL) {
+ Hacl_Hash_SHA1_free(ptr->hash_state);
+ ptr->hash_state = NULL;
+ }
PyTypeObject *tp = Py_TYPE(ptr);
PyObject_GC_UnTrack(ptr);
PyObject_GC_Del(ptr);
SHA1State *st = _PyType_GetModuleState(cls);
SHA1object *newobj;
- if ((newobj = newSHA1object(st)) == NULL)
+ if ((newobj = newSHA1object(st)) == NULL) {
return NULL;
+ }
ENTER_HASHLIB(self);
newobj->hash_state = Hacl_Hash_SHA1_copy(self->hash_state);
LEAVE_HASHLIB(self);
+ if (newobj->hash_state == NULL) {
+ Py_DECREF(newobj);
+ return PyErr_NoMemory();
+ }
return (PyObject *)newobj;
}
return _Py_strhex((const char *)digest, SHA1_DIGESTSIZE);
}
-static void update(Hacl_Hash_SHA1_state_t *state, uint8_t *buf, Py_ssize_t len) {
+static void
+update(Hacl_Hash_SHA1_state_t *state, uint8_t *buf, Py_ssize_t len)
+{
+ /*
+ * Note: we explicitly ignore the error code on the basis that it would
+ * take more than 1 billion years to overflow the maximum admissible length
+ * for SHA-1 (2^61 - 1).
+ */
#if PY_SSIZE_T_MAX > UINT32_MAX
- while (len > UINT32_MAX) {
- Hacl_Hash_SHA1_update(state, buf, UINT32_MAX);
- len -= UINT32_MAX;
- buf += UINT32_MAX;
- }
+ while (len > UINT32_MAX) {
+ (void)Hacl_Hash_SHA1_update(state, buf, UINT32_MAX);
+ len -= UINT32_MAX;
+ buf += UINT32_MAX;
+ }
#endif
- Hacl_Hash_SHA1_update(state, buf, (uint32_t) len);
+ /* cast to uint32_t is now safe */
+ (void)Hacl_Hash_SHA1_update(state, buf, (uint32_t)len);
}
/*[clinic input]
SHA1object *new;
Py_buffer buf;
- if (string)
+ if (string) {
GET_BUFFER_VIEW_OR_ERROUT(string, &buf);
+ }
SHA1State *st = sha1_get_state(module);
if ((new = newSHA1object(st)) == NULL) {
- if (string)
+ if (string) {
PyBuffer_Release(&buf);
+ }
return NULL;
}
new->hash_state = Hacl_Hash_SHA1_malloc();
- if (PyErr_Occurred()) {
+ if (new->hash_state == NULL) {
Py_DECREF(new);
- if (string)
+ if (string) {
PyBuffer_Release(&buf);
- return NULL;
+ }
+ return PyErr_NoMemory();
}
if (string) {
if (buf.len >= HASHLIB_GIL_MINSIZE) {
Py_BEGIN_ALLOW_THREADS
update(new->hash_state, buf.buf, buf.len);
Py_END_ALLOW_THREADS
- } else {
+ }
+ else {
update(new->hash_state, buf.buf, buf.len);
}
PyBuffer_Release(&buf);
return (sha2_state *)state;
}
-static void SHA256copy(SHA256object *src, SHA256object *dest)
+static int
+SHA256copy(SHA256object *src, SHA256object *dest)
{
dest->digestsize = src->digestsize;
dest->state = Hacl_Hash_SHA2_copy_256(src->state);
+ if (dest->state == NULL) {
+ (void)PyErr_NoMemory();
+ return -1;
+ }
+ return 0;
}
-static void SHA512copy(SHA512object *src, SHA512object *dest)
+static int
+SHA512copy(SHA512object *src, SHA512object *dest)
{
dest->digestsize = src->digestsize;
dest->state = Hacl_Hash_SHA2_copy_512(src->state);
+ if (dest->state == NULL) {
+ (void)PyErr_NoMemory();
+ return -1;
+ }
+ return 0;
}
static SHA256object *
SHA256_dealloc(PyObject *op)
{
SHA256object *ptr = _SHA256object_CAST(op);
- Hacl_Hash_SHA2_free_256(ptr->state);
+ if (ptr->state != NULL) {
+ Hacl_Hash_SHA2_free_256(ptr->state);
+ ptr->state = NULL;
+ }
PyTypeObject *tp = Py_TYPE(ptr);
PyObject_GC_UnTrack(ptr);
PyObject_GC_Del(ptr);
SHA512_dealloc(PyObject *op)
{
SHA512object *ptr = _SHA512object_CAST(op);
- Hacl_Hash_SHA2_free_512(ptr->state);
+ if (ptr->state != NULL) {
+ Hacl_Hash_SHA2_free_512(ptr->state);
+ ptr->state = NULL;
+ }
PyTypeObject *tp = Py_TYPE(ptr);
PyObject_GC_UnTrack(ptr);
PyObject_GC_Del(ptr);
/* HACL* takes a uint32_t for the length of its parameter, but Py_ssize_t can be
* 64 bits so we loop in <4gig chunks when needed. */
-static void update_256(Hacl_Hash_SHA2_state_t_256 *state, uint8_t *buf, Py_ssize_t len) {
- /* Note: we explicitly ignore the error code on the basis that it would take >
- * 1 billion years to overflow the maximum admissible length for SHA2-256
- * (namely, 2^61-1 bytes). */
+static void
+update_256(Hacl_Hash_SHA2_state_t_256 *state, uint8_t *buf, Py_ssize_t len)
+{
+ /*
+ * Note: we explicitly ignore the error code on the basis that it would
+ * take more than 1 billion years to overflow the maximum admissible length
+ * for SHA-2-256 (2^61 - 1).
+ */
#if PY_SSIZE_T_MAX > UINT32_MAX
- while (len > UINT32_MAX) {
- Hacl_Hash_SHA2_update_256(state, buf, UINT32_MAX);
- len -= UINT32_MAX;
- buf += UINT32_MAX;
- }
+ while (len > UINT32_MAX) {
+ (void)Hacl_Hash_SHA2_update_256(state, buf, UINT32_MAX);
+ len -= UINT32_MAX;
+ buf += UINT32_MAX;
+ }
#endif
- /* Cast to uint32_t is safe: len <= UINT32_MAX at this point. */
- Hacl_Hash_SHA2_update_256(state, buf, (uint32_t) len);
+ /* cast to uint32_t is now safe */
+ (void)Hacl_Hash_SHA2_update_256(state, buf, (uint32_t)len);
}
-static void update_512(Hacl_Hash_SHA2_state_t_512 *state, uint8_t *buf, Py_ssize_t len) {
- /* Note: we explicitly ignore the error code on the basis that it would take >
- * 1 billion years to overflow the maximum admissible length for this API
- * (namely, 2^64-1 bytes). */
+static void
+update_512(Hacl_Hash_SHA2_state_t_512 *state, uint8_t *buf, Py_ssize_t len)
+{
+ /*
+ * Note: we explicitly ignore the error code on the basis that it would
+ * take more than 1 billion years to overflow the maximum admissible length
+ * for SHA-2-512 (2^64 - 1).
+ */
#if PY_SSIZE_T_MAX > UINT32_MAX
- while (len > UINT32_MAX) {
- Hacl_Hash_SHA2_update_512(state, buf, UINT32_MAX);
- len -= UINT32_MAX;
- buf += UINT32_MAX;
- }
+ while (len > UINT32_MAX) {
+ (void)Hacl_Hash_SHA2_update_512(state, buf, UINT32_MAX);
+ len -= UINT32_MAX;
+ buf += UINT32_MAX;
+ }
#endif
- /* Cast to uint32_t is safe: len <= UINT32_MAX at this point. */
- Hacl_Hash_SHA2_update_512(state, buf, (uint32_t) len);
+ /* cast to uint32_t is now safe */
+ (void)Hacl_Hash_SHA2_update_512(state, buf, (uint32_t)len);
}
SHA256Type_copy_impl(SHA256object *self, PyTypeObject *cls)
/*[clinic end generated code: output=fabd515577805cd3 input=3137146fcb88e212]*/
{
+ int rc;
SHA256object *newobj;
sha2_state *state = _PyType_GetModuleState(cls);
if (Py_IS_TYPE(self, state->sha256_type)) {
if ((newobj = newSHA256object(state)) == NULL) {
return NULL;
}
- } else {
+ }
+ else {
if ((newobj = newSHA224object(state)) == NULL) {
return NULL;
}
}
ENTER_HASHLIB(self);
- SHA256copy(self, newobj);
+ rc = SHA256copy(self, newobj);
LEAVE_HASHLIB(self);
+ if (rc < 0) {
+ Py_DECREF(newobj);
+ return NULL;
+ }
return (PyObject *)newobj;
}
SHA512Type_copy_impl(SHA512object *self, PyTypeObject *cls)
/*[clinic end generated code: output=66d2a8ef20de8302 input=f673a18f66527c90]*/
{
+ int rc;
SHA512object *newobj;
sha2_state *state = _PyType_GetModuleState(cls);
}
ENTER_HASHLIB(self);
- SHA512copy(self, newobj);
+ rc = SHA512copy(self, newobj);
LEAVE_HASHLIB(self);
+ if (rc < 0) {
+ Py_DECREF(newobj);
+ return NULL;
+ }
return (PyObject *)newobj;
}
new->state = Hacl_Hash_SHA2_malloc_256();
new->digestsize = 32;
- if (PyErr_Occurred()) {
+ if (new->state == NULL) {
Py_DECREF(new);
if (string) {
PyBuffer_Release(&buf);
}
- return NULL;
+ return PyErr_NoMemory();
}
if (string) {
if (buf.len >= HASHLIB_GIL_MINSIZE) {
Py_BEGIN_ALLOW_THREADS
update_256(new->state, buf.buf, buf.len);
Py_END_ALLOW_THREADS
- } else {
+ }
+ else {
update_256(new->state, buf.buf, buf.len);
}
PyBuffer_Release(&buf);
new->state = Hacl_Hash_SHA2_malloc_224();
new->digestsize = 28;
- if (PyErr_Occurred()) {
+ if (new->state == NULL) {
Py_DECREF(new);
if (string) {
PyBuffer_Release(&buf);
}
- return NULL;
+ return PyErr_NoMemory();
}
if (string) {
if (buf.len >= HASHLIB_GIL_MINSIZE) {
Py_BEGIN_ALLOW_THREADS
update_256(new->state, buf.buf, buf.len);
Py_END_ALLOW_THREADS
- } else {
+ }
+ else {
update_256(new->state, buf.buf, buf.len);
}
PyBuffer_Release(&buf);
sha2_state *state = sha2_get_state(module);
- if (string)
+ if (string) {
GET_BUFFER_VIEW_OR_ERROUT(string, &buf);
+ }
if ((new = newSHA512object(state)) == NULL) {
- if (string)
+ if (string) {
PyBuffer_Release(&buf);
+ }
return NULL;
}
new->state = Hacl_Hash_SHA2_malloc_512();
new->digestsize = 64;
- if (PyErr_Occurred()) {
+ if (new->state == NULL) {
Py_DECREF(new);
- if (string)
+ if (string) {
PyBuffer_Release(&buf);
- return NULL;
+ }
+ return PyErr_NoMemory();
}
if (string) {
if (buf.len >= HASHLIB_GIL_MINSIZE) {
Py_BEGIN_ALLOW_THREADS
update_512(new->state, buf.buf, buf.len);
Py_END_ALLOW_THREADS
- } else {
+ }
+ else {
update_512(new->state, buf.buf, buf.len);
}
PyBuffer_Release(&buf);
sha2_state *state = sha2_get_state(module);
- if (string)
+ if (string) {
GET_BUFFER_VIEW_OR_ERROUT(string, &buf);
+ }
if ((new = newSHA384object(state)) == NULL) {
- if (string)
+ if (string) {
PyBuffer_Release(&buf);
+ }
return NULL;
}
new->state = Hacl_Hash_SHA2_malloc_384();
new->digestsize = 48;
- if (PyErr_Occurred()) {
+ if (new->state == NULL) {
Py_DECREF(new);
- if (string)
+ if (string) {
PyBuffer_Release(&buf);
- return NULL;
+ }
+ return PyErr_NoMemory();
}
if (string) {
if (buf.len >= HASHLIB_GIL_MINSIZE) {
Py_BEGIN_ALLOW_THREADS
update_512(new->state, buf.buf, buf.len);
Py_END_ALLOW_THREADS
- } else {
+ }
+ else {
update_512(new->state, buf.buf, buf.len);
}
PyBuffer_Release(&buf);
return newobj;
}
-static void sha3_update(Hacl_Hash_SHA3_state_t *state, uint8_t *buf, Py_ssize_t len) {
- /* Note: we explicitly ignore the error code on the basis that it would take >
- * 1 billion years to hash more than 2^64 bytes. */
+static void
+sha3_update(Hacl_Hash_SHA3_state_t *state, uint8_t *buf, Py_ssize_t len)
+{
+ /*
+ * Note: we explicitly ignore the error code on the basis that it would
+ * take more than 1 billion years to overflow the maximum admissible length
+ * for SHA-3 (2^64 - 1).
+ */
#if PY_SSIZE_T_MAX > UINT32_MAX
- while (len > UINT32_MAX) {
- Hacl_Hash_SHA3_update(state, buf, UINT32_MAX);
- len -= UINT32_MAX;
- buf += UINT32_MAX;
- }
+ while (len > UINT32_MAX) {
+ (void)Hacl_Hash_SHA3_update(state, buf, UINT32_MAX);
+ len -= UINT32_MAX;
+ buf += UINT32_MAX;
+ }
#endif
- /* Cast to uint32_t is safe: len <= UINT32_MAX at this point. */
- Hacl_Hash_SHA3_update(state, buf, (uint32_t) len);
+ /* cast to uint32_t is now safe */
+ (void)Hacl_Hash_SHA3_update(state, buf, (uint32_t)len);
}
/*[clinic input]
if (type == state->sha3_224_type) {
self->hash_state = Hacl_Hash_SHA3_malloc(Spec_Hash_Definitions_SHA3_224);
- } else if (type == state->sha3_256_type) {
+ }
+ else if (type == state->sha3_256_type) {
self->hash_state = Hacl_Hash_SHA3_malloc(Spec_Hash_Definitions_SHA3_256);
- } else if (type == state->sha3_384_type) {
+ }
+ else if (type == state->sha3_384_type) {
self->hash_state = Hacl_Hash_SHA3_malloc(Spec_Hash_Definitions_SHA3_384);
- } else if (type == state->sha3_512_type) {
+ }
+ else if (type == state->sha3_512_type) {
self->hash_state = Hacl_Hash_SHA3_malloc(Spec_Hash_Definitions_SHA3_512);
- } else if (type == state->shake_128_type) {
+ }
+ else if (type == state->shake_128_type) {
self->hash_state = Hacl_Hash_SHA3_malloc(Spec_Hash_Definitions_Shake128);
- } else if (type == state->shake_256_type) {
+ }
+ else if (type == state->shake_256_type) {
self->hash_state = Hacl_Hash_SHA3_malloc(Spec_Hash_Definitions_Shake256);
- } else {
+ }
+ else {
PyErr_BadInternalCall();
goto error;
}
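+ /* Every successful branch above allocated hash_state; one check covers them all. */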
+ if (self->hash_state == NULL) {
+ (void)PyErr_NoMemory();
+ goto error;
+ }
+
if (data) {
GET_BUFFER_VIEW_OR_ERROR(data, &buf, goto error);
if (buf.len >= HASHLIB_GIL_MINSIZE) {
Py_BEGIN_ALLOW_THREADS
sha3_update(self->hash_state, buf.buf, buf.len);
Py_END_ALLOW_THREADS
- } else {
+ }
+ else {
sha3_update(self->hash_state, buf.buf, buf.len);
}
}
return (PyObject *)self;
- error:
+error:
if (self) {
Py_DECREF(self);
}
ENTER_HASHLIB(self);
newobj->hash_state = Hacl_Hash_SHA3_copy(self->hash_state);
LEAVE_HASHLIB(self);
+ if (newobj->hash_state == NULL) {
+ Py_DECREF(newobj);
+ return PyErr_NoMemory();
+ }
return (PyObject *)newobj;
}
/*[clinic end generated code: output=fd531842e20b2d5b input=5b2a659536bbd248]*/
{
unsigned char digest[SHA3_MAX_DIGESTSIZE];
- // This function errors out if the algorithm is Shake. Here, we know this
+ // This function errors out if the algorithm is SHAKE. Here, we know this
// not to be the case, and therefore do not perform error checking.
ENTER_HASHLIB(self);
- Hacl_Hash_SHA3_digest(self->hash_state, digest);
+ (void)Hacl_Hash_SHA3_digest(self->hash_state, digest);
LEAVE_HASHLIB(self);
return PyBytes_FromStringAndSize((const char *)digest,
Hacl_Hash_SHA3_hash_len(self->hash_state));
{
unsigned char digest[SHA3_MAX_DIGESTSIZE];
ENTER_HASHLIB(self);
- Hacl_Hash_SHA3_digest(self->hash_state, digest);
+ (void)Hacl_Hash_SHA3_digest(self->hash_state, digest);
LEAVE_HASHLIB(self);
return _Py_strhex((const char *)digest,
Hacl_Hash_SHA3_hash_len(self->hash_state));
* - the output length is zero -- we follow the existing behavior and return
* an empty digest, without raising an error */
if (digestlen > 0) {
- Hacl_Hash_SHA3_squeeze(self->hash_state, digest, digestlen);
+ (void)Hacl_Hash_SHA3_squeeze(self->hash_state, digest, digestlen);
}
if (hex) {
result = _Py_strhex((const char *)digest, digestlen);
- } else {
- result = PyBytes_FromStringAndSize((const char *)digest,
- digestlen);
+ }
+ else {
+ result = PyBytes_FromStringAndSize((const char *)digest, digestlen);
}
PyMem_Free(digest);
return result;