/*
 * Copyright 2019-2021 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the Apache License 2.0 (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */
/* Dispatch functions for AES GCM mode */

/*
 * This file uses the low level AES functions (which are deprecated for
 * non-internal use) in order to implement provider AES ciphers.
 */
16 #include "internal/deprecated.h"
18 #include "cipher_aes_gcm.h"
20 static int aes_gcm_initkey(PROV_GCM_CTX
*ctx
, const unsigned char *key
,
23 PROV_AES_GCM_CTX
*actx
= (PROV_AES_GCM_CTX
*)ctx
;
24 AES_KEY
*ks
= &actx
->ks
.ks
;
28 # ifdef HWAES_ctr32_encrypt_blocks
29 GCM_HW_SET_KEY_CTR_FN(ks
, HWAES_set_encrypt_key
, HWAES_encrypt
,
30 HWAES_ctr32_encrypt_blocks
);
32 GCM_HW_SET_KEY_CTR_FN(ks
, HWAES_set_encrypt_key
, HWAES_encrypt
, NULL
);
33 # endif /* HWAES_ctr32_encrypt_blocks */
35 # endif /* HWAES_CAPABLE */
39 GCM_HW_SET_KEY_CTR_FN(ks
, AES_set_encrypt_key
, AES_encrypt
,
40 bsaes_ctr32_encrypt_blocks
);
42 # endif /* BSAES_CAPABLE */
46 GCM_HW_SET_KEY_CTR_FN(ks
, vpaes_set_encrypt_key
, vpaes_encrypt
, NULL
);
48 # endif /* VPAES_CAPABLE */
52 GCM_HW_SET_KEY_CTR_FN(ks
, AES_set_encrypt_key
, AES_encrypt
,
55 GCM_HW_SET_KEY_CTR_FN(ks
, AES_set_encrypt_key
, AES_encrypt
, NULL
);
56 # endif /* AES_CTR_ASM */
62 static int generic_aes_gcm_cipher_update(PROV_GCM_CTX
*ctx
, const unsigned char *in
,
63 size_t len
, unsigned char *out
)
66 if (ctx
->ctr
!= NULL
) {
67 #if defined(AES_GCM_ASM)
70 if (len
>= AES_GCM_ENC_BYTES
&& AES_GCM_ASM(ctx
)) {
71 size_t res
= (16 - ctx
->gcm
.mres
) % 16;
73 if (CRYPTO_gcm128_encrypt(&ctx
->gcm
, in
, out
, res
))
76 bulk
= AES_gcm_encrypt(in
+ res
, out
+ res
, len
- res
,
78 ctx
->gcm
.Yi
.c
, ctx
->gcm
.Xi
.u
);
80 ctx
->gcm
.len
.u
[1] += bulk
;
83 if (CRYPTO_gcm128_encrypt_ctr32(&ctx
->gcm
, in
+ bulk
, out
+ bulk
,
84 len
- bulk
, ctx
->ctr
))
87 if (CRYPTO_gcm128_encrypt_ctr32(&ctx
->gcm
, in
, out
, len
, ctx
->ctr
))
89 #endif /* AES_GCM_ASM */
91 if (CRYPTO_gcm128_encrypt(&ctx
->gcm
, in
, out
, len
))
95 if (ctx
->ctr
!= NULL
) {
96 #if defined(AES_GCM_ASM)
99 if (len
>= AES_GCM_DEC_BYTES
&& AES_GCM_ASM(ctx
)) {
100 size_t res
= (16 - ctx
->gcm
.mres
) % 16;
102 if (CRYPTO_gcm128_decrypt(&ctx
->gcm
, in
, out
, res
))
105 bulk
= AES_gcm_decrypt(in
+ res
, out
+ res
, len
- res
,
107 ctx
->gcm
.Yi
.c
, ctx
->gcm
.Xi
.u
);
109 ctx
->gcm
.len
.u
[1] += bulk
;
112 if (CRYPTO_gcm128_decrypt_ctr32(&ctx
->gcm
, in
+ bulk
, out
+ bulk
,
113 len
- bulk
, ctx
->ctr
))
116 if (CRYPTO_gcm128_decrypt_ctr32(&ctx
->gcm
, in
, out
, len
, ctx
->ctr
))
118 #endif /* AES_GCM_ASM */
120 if (CRYPTO_gcm128_decrypt(&ctx
->gcm
, in
, out
, len
))
127 static const PROV_GCM_HW aes_gcm
= {
131 generic_aes_gcm_cipher_update
,
132 ossl_gcm_cipher_final
,
136 #if defined(S390X_aes_128_CAPABLE)
137 # include "cipher_aes_gcm_hw_s390x.inc"
138 #elif defined(AESNI_CAPABLE)
139 # include "cipher_aes_gcm_hw_aesni.inc"
140 #elif defined(SPARC_AES_CAPABLE)
141 # include "cipher_aes_gcm_hw_t4.inc"
142 #elif defined(AES_PMULL_CAPABLE) && defined(AES_GCM_ASM)
143 # include "cipher_aes_gcm_hw_armv8.inc"
145 const PROV_GCM_HW
*ossl_prov_aes_hw_gcm(size_t keybits
)