/*
 * Copyright 2021 The OpenSSL Project Authors. All Rights Reserved.
 * Copyright (c) 2021, Intel Corporation. All Rights Reserved.
 *
 * Licensed under the Apache License 2.0 (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

/*
 * AVX512 VAES + VPCLMULDQD support for AES GCM.
 * This file is included by cipher_aes_gcm_hw_aesni.inc
 */
16 #undef VAES_GCM_ENABLED
17 #if (defined(__x86_64) || defined(__x86_64__) || \
18 defined(_M_AMD64) || defined(_M_X64))
19 # define VAES_GCM_ENABLED
21 /* Returns non-zero when AVX512F + VAES + VPCLMULDQD combination is available */
22 int ossl_vaes_vpclmulqdq_capable(void);
24 # define OSSL_AES_GCM_UPDATE(direction) \
25 void ossl_aes_gcm_ ## direction ## _avx512(const void *ks, \
27 unsigned int *pblocklen, \
28 const unsigned char *in, \
32 OSSL_AES_GCM_UPDATE(encrypt)
33 OSSL_AES_GCM_UPDATE(decrypt)
35 void ossl_aes_gcm_init_avx512(const void *ks, void *gcm128ctx);
36 void ossl_aes_gcm_setiv_avx512(const void *ks, void *gcm128ctx,
37 const unsigned char *iv, size_t ivlen);
38 void ossl_aes_gcm_update_aad_avx512(void *gcm128ctx, const unsigned char *aad,
40 void ossl_aes_gcm_finalize_avx512(void *gcm128ctx, unsigned int pblocklen);
42 void ossl_gcm_gmult_avx512(u64 Xi[2], const void *gcm128ctx);
44 static int vaes_gcm_setkey(PROV_GCM_CTX *ctx, const unsigned char *key,
47 GCM128_CONTEXT *gcmctx = &ctx->gcm;
48 PROV_AES_GCM_CTX *actx = (PROV_AES_GCM_CTX *)ctx;
49 AES_KEY *ks = &actx->ks.ks;
52 aesni_set_encrypt_key(key, keylen * 8, ks);
53 memset(gcmctx, 0, sizeof(*gcmctx));
57 ossl_aes_gcm_init_avx512(ks, gcmctx);
62 static int vaes_gcm_setiv(PROV_GCM_CTX *ctx, const unsigned char *iv,
65 GCM128_CONTEXT *gcmctx = &ctx->gcm;
67 gcmctx->Yi.u[0] = 0; /* Current counter */
69 gcmctx->Xi.u[0] = 0; /* AAD hash */
71 gcmctx->len.u[0] = 0; /* AAD length */
72 gcmctx->len.u[1] = 0; /* Message length */
76 /* IV is limited by 2^64 bits, thus 2^61 bytes */
77 if (ivlen > (U64(1) << 61))
80 ossl_aes_gcm_setiv_avx512(ctx->ks, gcmctx, iv, ivlen);
85 static int vaes_gcm_aadupdate(PROV_GCM_CTX *ctx,
86 const unsigned char *aad,
89 GCM128_CONTEXT *gcmctx = &ctx->gcm;
90 u64 alen = gcmctx->len.u[0];
94 /* Bad sequence: call of AAD update after message processing */
95 if (gcmctx->len.u[1] > 0)
99 /* AAD is limited by 2^64 bits, thus 2^61 bytes */
100 if ((alen > (U64(1) << 61)) || (alen < aad_len))
103 gcmctx->len.u[0] = alen;
106 /* Partial AAD block left from previous AAD update calls */
109 * Fill partial block buffer till full block
110 * (note, the hash is stored reflected)
112 while (ares > 0 && aad_len > 0) {
113 gcmctx->Xi.c[15 - ares] ^= *(aad++);
115 ares = (ares + 1) % AES_BLOCK_SIZE;
117 /* Full block gathered */
119 ossl_gcm_gmult_avx512(gcmctx->Xi.u, gcmctx);
120 } else { /* no more AAD */
126 /* Bulk AAD processing */
127 lenBlks = aad_len & ((size_t)(-AES_BLOCK_SIZE));
129 ossl_aes_gcm_update_aad_avx512(gcmctx, aad, lenBlks);
134 /* Add remaining AAD to the hash (note, the hash is stored reflected) */
137 for (i = 0; i < aad_len; i++)
138 gcmctx->Xi.c[15 - i] ^= aad[i];
146 static int vaes_gcm_cipherupdate(PROV_GCM_CTX *ctx, const unsigned char *in,
147 size_t len, unsigned char *out)
149 GCM128_CONTEXT *gcmctx = &ctx->gcm;
150 u64 mlen = gcmctx->len.u[1];
153 if (mlen > ((U64(1) << 36) - 32) || (mlen < len))
156 gcmctx->len.u[1] = mlen;
158 /* Finalize GHASH(AAD) if AAD partial blocks left unprocessed */
159 if (gcmctx->ares > 0) {
160 ossl_gcm_gmult_avx512(gcmctx->Xi.u, gcmctx);
165 ossl_aes_gcm_encrypt_avx512(ctx->ks, gcmctx, &gcmctx->mres, in, len, out);
167 ossl_aes_gcm_decrypt_avx512(ctx->ks, gcmctx, &gcmctx->mres, in, len, out);
172 static int vaes_gcm_cipherfinal(PROV_GCM_CTX *ctx, unsigned char *tag)
174 GCM128_CONTEXT *gcmctx = &ctx->gcm;
175 unsigned int *res = &gcmctx->mres;
177 /* Finalize AAD processing */
178 if (gcmctx->ares > 0)
181 ossl_aes_gcm_finalize_avx512(gcmctx, *res);
184 ctx->taglen = GCM_TAG_MAX_SIZE;
185 memcpy(tag, gcmctx->Xi.c,
186 ctx->taglen <= sizeof(gcmctx->Xi.c) ? ctx->taglen :
187 sizeof(gcmctx->Xi.c));
190 return !CRYPTO_memcmp(gcmctx->Xi.c, tag, ctx->taglen);
196 static const PROV_GCM_HW vaes_gcm = {
200 vaes_gcm_cipherupdate,
201 vaes_gcm_cipherfinal,