buf += CHACHA_BLOCK_SIZE;
}
- memzero_explicit(&chacha_state, sizeof(chacha_state));
+ chacha_zeroize_state(&chacha_state);
}
memzero_explicit(block, sizeof(block));
out_zero_chacha:
- memzero_explicit(&chacha_state, sizeof(chacha_state));
+ chacha_zeroize_state(&chacha_state);
return ret ? ret : -EFAULT;
}
bch2_chacha20_init(&state, key, nonce);
chacha20_crypt(&state, data, data, len);
- memzero_explicit(&state, sizeof(state));
+ chacha_zeroize_state(&state);
}
static void bch2_poly1305_init(struct poly1305_desc_ctx *desc,
chacha20_crypt(&chacha_state, p, p, bv.bv_len);
kunmap_local(p);
}
- memzero_explicit(&chacha_state, sizeof(chacha_state));
+ chacha_zeroize_state(&chacha_state);
return ret;
}
#define _CRYPTO_CHACHA_H
#include <linux/unaligned.h>
+#include <linux/string.h>
#include <linux/types.h>
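The added <linux/string.h> include is what declares memzero_explicit(), which the new inline helper introduced below relies on; without it the header would no longer be self-contained.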
/* 32-bit stream position, then 96-bit nonce (RFC7539 convention) */
chacha_crypt(state, dst, src, bytes, 20);
}
+static inline void chacha_zeroize_state(struct chacha_state *state)
+{
+ memzero_explicit(state, sizeof(*state));
+}
+
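For reference, a minimal calling sketch, not part of the patch: the function name and the key/IV plumbing are hypothetical, and it assumes the chacha_init() and chacha20_crypt() helpers already provided by this header. It shows the intended pattern of wiping the ChaCha state with the new helper once the keystream has been consumed, instead of open-coding memzero_explicit() at every call site.

/* Illustrative only -- hypothetical caller, not part of the patch. */
static void example_encrypt(u8 *buf, unsigned int len,
			    const u32 *key,	/* CHACHA_KEY_SIZE bytes */
			    const u8 *iv)	/* CHACHA_IV_SIZE bytes */
{
	struct chacha_state state;

	chacha_init(&state, key, iv);		/* IV: 32-bit counter, then 96-bit nonce */
	chacha20_crypt(&state, buf, buf, len);	/* encrypt in place */
	chacha_zeroize_state(&state);		/* wipe the key-bearing state */
}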
#if IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA)
bool chacha_is_arch_optimized(void);
#else
poly1305_final(&poly1305_state, dst + src_len);
- memzero_explicit(chacha_state, sizeof(*chacha_state));
+ chacha_zeroize_state(chacha_state);
memzero_explicit(&b, sizeof(b));
}
ret = __chacha20poly1305_decrypt(dst, src, src_len, ad, ad_len,
&chacha_state);
- memzero_explicit(&chacha_state, sizeof(chacha_state));
+ chacha_zeroize_state(&chacha_state);
memzero_explicit(iv, sizeof(iv));
memzero_explicit(k, sizeof(k));
return ret;
!crypto_memneq(b.mac[0], b.mac[1], POLY1305_DIGEST_SIZE);
}
- memzero_explicit(&chacha_state, sizeof(chacha_state));
+ chacha_zeroize_state(&chacha_state);
memzero_explicit(&b, sizeof(b));
return ret;
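Only the struct chacha_state wipes are converted to the new helper; the neighbouring memzero_explicit() calls on block, iv, k, and b stay open-coded because they clear plain byte buffers rather than a ChaCha state. memzero_explicit() is the kernel's variant of memset() that the compiler is not allowed to optimize away as a dead store, which is why the helper wraps it rather than memset().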