 * Copyright 2001-2019 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the Apache License 2.0 (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
#include <openssl/opensslconf.h>
#include <openssl/crypto.h>
#include <openssl/evp.h>
#include <openssl/err.h>
#include <openssl/aes.h>
#include <openssl/rand.h>
#include <openssl/cmac.h>
#include "internal/evp_int.h"
#include "internal/cryptlib.h"
#include "internal/modes_int.h"
#include "modes_lcl.h"
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    unsigned char *iv;          /* Temporary IV store */
    int ivlen;                  /* IV length */
    int iv_gen;                 /* It is OK to generate IVs */
    int iv_gen_rand;            /* No IV was specified, so generate a rand IV */
    int tls_aad_len;            /* TLS AAD length */
    uint64_t tls_enc_records;   /* Number of TLS records encrypted */
    } ks1, ks2;                 /* AES key schedules to use */
    void (*stream) (const unsigned char *in,
                    unsigned char *out, size_t length,
                    const AES_KEY *key1, const AES_KEY *key2,
                    const unsigned char iv[16]);
static const int allow_insecure_decrypt = 0;
static const int allow_insecure_decrypt = 1;
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    int tag_set;                /* Set if tag is valid */
    int len_set;                /* Set if message length set */
    int L, M;                   /* L and M parameters from RFC3610 */
    int tls_aad_len;            /* TLS AAD length */
#ifndef OPENSSL_NO_OCB
    } ksenc;                    /* AES key schedule to use for encryption */
    } ksdec;                    /* AES key schedule to use for decryption */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    unsigned char *iv;          /* Temporary IV store */
    unsigned char tag[16];
    unsigned char data_buf[16]; /* Store partial data blocks */
    unsigned char aad_buf[16];  /* Store partial AAD blocks */
    int ivlen;                  /* IV length */
#define MAXBITCHUNK     ((size_t)1<<(sizeof(size_t)*8-4))
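/*
 * Added note: MAXBITCHUNK is 1/16th of the addressable size_t range (2^60
 * bytes on a 64-bit build).  It appears to be used to cap how much data the
 * bit-oriented CFB1 code path processes per call, so that the byte count can
 * be converted to a bit count without overflowing.
 */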
int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,

void vpaes_encrypt(const unsigned char *in, unsigned char *out,
void vpaes_decrypt(const unsigned char *in, unsigned char *out,

void vpaes_cbc_encrypt(const unsigned char *in,
                       const AES_KEY *key, unsigned char *ivec, int enc);

void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
                       size_t length, const AES_KEY *key,
                       unsigned char ivec[16], int enc);
void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
                                size_t len, const AES_KEY *key,
                                const unsigned char ivec[16]);
void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);

void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const AES_KEY *key,
                       const unsigned char ivec[AES_BLOCK_SIZE]);

void AES_xts_encrypt(const unsigned char *inp, unsigned char *out, size_t len,
                     const AES_KEY *key1, const AES_KEY *key2,
                     const unsigned char iv[16]);
void AES_xts_decrypt(const unsigned char *inp, unsigned char *out, size_t len,
                     const AES_KEY *key1, const AES_KEY *key2,
                     const unsigned char iv[16]);
/* increment counter (64-bit int) by 1 */
static void ctr64_inc(unsigned char *counter)
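    /*
     * Added note: the body (elided in this excerpt) treats the eight bytes at
     * 'counter' as a big-endian 64-bit integer and adds one, propagating any
     * carry.  Callers such as the EVP_CTRL_GCM_IV_GEN handler below pass a
     * pointer to the last 8 bytes of the IV so that only the invocation field
     * is incremented.
     */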
#if defined(OPENSSL_CPUID_OBJ) && (defined(__powerpc__) || defined(__ppc__) || defined(_ARCH_PPC))
# include "ppc_arch.h"
# define VPAES_CAPABLE (OPENSSL_ppccap_P & PPC_ALTIVEC)
# define HWAES_CAPABLE (OPENSSL_ppccap_P & PPC_CRYPTO207)
# define HWAES_set_encrypt_key aes_p8_set_encrypt_key
# define HWAES_set_decrypt_key aes_p8_set_decrypt_key
# define HWAES_encrypt aes_p8_encrypt
# define HWAES_decrypt aes_p8_decrypt
# define HWAES_cbc_encrypt aes_p8_cbc_encrypt
# define HWAES_ctr32_encrypt_blocks aes_p8_ctr32_encrypt_blocks
# define HWAES_xts_encrypt aes_p8_xts_encrypt
# define HWAES_xts_decrypt aes_p8_xts_decrypt
#if defined(AES_ASM) && !defined(I386_ONLY) && ( \
        ((defined(__i386) || defined(__i386__) || \
          defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2)) || \
        defined(__x86_64) || defined(__x86_64__) || \
        defined(_M_AMD64) || defined(_M_X64) )

extern unsigned int OPENSSL_ia32cap_P[];

# define VPAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
# define BSAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
# define AESNI_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(57-32)))
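/*
 * Added note: OPENSSL_ia32cap_P[1] mirrors the ECX word of CPUID leaf 1.
 * Bit 41 overall (ECX bit 9) is the SSSE3 flag, which both the
 * vector-permutation (VPAES) and bit-sliced (BSAES) implementations need;
 * bit 57 overall (ECX bit 25) is the AES-NI flag.
 */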
int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
int aesni_set_decrypt_key(const unsigned char *userKey, int bits,

void aesni_encrypt(const unsigned char *in, unsigned char *out,
void aesni_decrypt(const unsigned char *in, unsigned char *out,

void aesni_ecb_encrypt(const unsigned char *in,
                       size_t length, const AES_KEY *key, int enc);
void aesni_cbc_encrypt(const unsigned char *in,
                       const AES_KEY *key, unsigned char *ivec, int enc);

void aesni_ctr32_encrypt_blocks(const unsigned char *in,
                                const void *key, const unsigned char *ivec);

void aesni_xts_encrypt(const unsigned char *in,
                       const AES_KEY *key1, const AES_KEY *key2,
                       const unsigned char iv[16]);

void aesni_xts_decrypt(const unsigned char *in,
                       const AES_KEY *key1, const AES_KEY *key2,
                       const unsigned char iv[16]);

void aesni_ccm64_encrypt_blocks(const unsigned char *in,
                                const unsigned char ivec[16],
                                unsigned char cmac[16]);

void aesni_ccm64_decrypt_blocks(const unsigned char *in,
                                const unsigned char ivec[16],
                                unsigned char cmac[16]);
# if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
size_t aesni_gcm_encrypt(const unsigned char *in,
                         const void *key, unsigned char ivec[16], u64 *Xi);
# define AES_gcm_encrypt aesni_gcm_encrypt
size_t aesni_gcm_decrypt(const unsigned char *in,
                         const void *key, unsigned char ivec[16], u64 *Xi);
# define AES_gcm_decrypt aesni_gcm_decrypt
void gcm_ghash_avx(u64 Xi[2], const u128 Htable[16], const u8 *in,
# define AES_GCM_ASM(gctx)       (gctx->ctr==aesni_ctr32_encrypt_blocks && \
                                 gctx->gcm.ghash==gcm_ghash_avx)
# define AES_GCM_ASM2(gctx)      (gctx->gcm.block==(block128_f)aesni_encrypt && \
                                 gctx->gcm.ghash==gcm_ghash_avx)
# undef AES_GCM_ASM2          /* minor size optimization */
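/*
 * Added note: AES_GCM_ASM(gctx) is true only when both the counter routine
 * and the GHASH routine are the AVX implementations, i.e. when the stitched
 * aesni_gcm_encrypt()/aesni_gcm_decrypt() fast path may be used.
 * AES_GCM_ASM2 was an alternative predicate keyed off the block routine; it
 * is #undef-ed above purely as a size optimization, as the comment says.
 */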
static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                          const unsigned char *iv, int enc)
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        ret = aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
        dat->block = (block128_f) aesni_decrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) aesni_cbc_encrypt : NULL;
        ret = aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
        dat->block = (block128_f) aesni_encrypt;
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
        else if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
            dat->stream.cbc = NULL;

        EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
    aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
                      EVP_CIPHER_CTX_iv_noconst(ctx),
                      EVP_CIPHER_CTX_encrypting(ctx));

static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
    size_t bl = EVP_CIPHER_CTX_block_size(ctx);

    aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
                      EVP_CIPHER_CTX_encrypting(ctx));
# define aesni_ofb_cipher aes_ofb_cipher
static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb_cipher aes_cfb_cipher
static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb8_cipher aes_cfb8_cipher
static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_cfb1_cipher aes_cfb1_cipher
static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_ctr_cipher aes_ctr_cipher
static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);

        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
        gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        /* If key set use IV, otherwise copy */
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            memcpy(gctx->iv, iv, gctx->ivlen);

# define aesni_gcm_cipher aes_gcm_cipher
static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);

        /* The key is two half length keys in reality */
        const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
        const int bits = bytes * 8;

        /*
         * Verify that the two keys are different.
         *
         * This addresses Rogaway's vulnerability.
         * See comment in aes_xts_init_key() below.
         */
        if ((!allow_insecure_decrypt || enc)
            && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
            EVPerr(EVP_F_AESNI_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
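        /*
         * Added note: XTS treats the two halves of the supplied key as
         * independent AES keys - the first half works on the data blocks and
         * the second half encrypts the tweak.  If both halves are identical
         * the construction loses its security argument (the weakness Rogaway
         * described), so equal halves are rejected here except under the
         * legacy allow_insecure_decrypt concession.
         */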
        /* key_len is two AES keys */
            aesni_set_encrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_encrypt;
            xctx->stream = aesni_xts_encrypt;

            aesni_set_decrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_decrypt;
            xctx->stream = aesni_xts_decrypt;

        aesni_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aesni_encrypt;

        xctx->xts.key1 = &xctx->ks1;
        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);

# define aesni_xts_cipher aes_xts_cipher
static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);

        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aesni_encrypt);
        cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
            (ccm128_f) aesni_ccm64_decrypt_blocks;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);

# define aesni_ccm_cipher aes_ccm_cipher
static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
# ifndef OPENSSL_NO_OCB
void aesni_ocb_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
void aesni_ocb_decrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);

static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
            aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
            aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) aesni_encrypt,
                                    (block128_f) aesni_decrypt,
                                    enc ? aesni_ocb_encrypt
                                        : aesni_ocb_decrypt))

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
        if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
        /* If key set use IV, otherwise copy */
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
            memcpy(octx->iv, iv, octx->ivlen);

# define aesni_ocb_cipher aes_ocb_cipher
static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
# endif                        /* OPENSSL_NO_OCB */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aesni_##mode##_cipher, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_cipher, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aesni_##mode##_init_key, \
        aesni_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
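/*
 * Added note: the EVP_aes_##keylen##_##mode() functions generated by these
 * macros are the public entry points.  They return the AES-NI EVP_CIPHER
 * table when AESNI_CAPABLE evaluates true at run time and the generic table
 * otherwise, so an application simply writes, e.g.
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     EVP_EncryptInit_ex(c, EVP_aes_256_gcm(), NULL, key, iv);
 *
 * and transparently gets the fastest implementation for the CPU.
 */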
#elif defined(AES_ASM) && (defined(__sparc) || defined(__sparc__))

# include "sparc_arch.h"

extern unsigned int OPENSSL_sparcv9cap_P[];

/*
 * Initial Fujitsu SPARC64 X support
 */
# define HWAES_CAPABLE (OPENSSL_sparcv9cap_P[0] & SPARCV9_FJAESX)
# define HWAES_set_encrypt_key aes_fx_set_encrypt_key
# define HWAES_set_decrypt_key aes_fx_set_decrypt_key
# define HWAES_encrypt aes_fx_encrypt
# define HWAES_decrypt aes_fx_decrypt
# define HWAES_cbc_encrypt aes_fx_cbc_encrypt
# define HWAES_ctr32_encrypt_blocks aes_fx_ctr32_encrypt_blocks

# define SPARC_AES_CAPABLE (OPENSSL_sparcv9cap_P[1] & CFR_AES)

void aes_t4_set_encrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
void aes_t4_set_decrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
void aes_t4_encrypt(const unsigned char *in, unsigned char *out,
void aes_t4_decrypt(const unsigned char *in, unsigned char *out,
/*
 * Key-length specific subroutines were chosen for the following reason.
 * Each SPARC T4 core can execute up to 8 threads which share the core's
 * resources.  Loading as much key material as possible into registers
 * minimizes references to the shared memory interface, as well as the
 * number of instructions in inner loops [much needed on T4].  But then
 * non-key-length-specific routines would require conditional branches
 * either in inner loops or on subroutine entry.  The former is hardly
 * acceptable, while the latter means the code grows to roughly the size
 * occupied by multiple key-length specific subroutines anyway, so why fight?
 */
void aes128_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes128_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes192_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes192_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes256_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes256_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes128_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes192_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes256_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes128_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes128_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                           const unsigned char *iv, int enc)
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes128_t4_cbc_decrypt : NULL;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes192_t4_cbc_decrypt : NULL;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes256_t4_cbc_decrypt : NULL;

        aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_encrypt;
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
                dat->stream.cbc = NULL;
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
                dat->stream.cbc = NULL;
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
                dat->stream.cbc = NULL;

        EVPerr(EVP_F_AES_T4_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
# define aes_t4_cbc_cipher aes_cbc_cipher
static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_ecb_cipher aes_ecb_cipher
static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_ofb_cipher aes_ofb_cipher
static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_cfb_cipher aes_cfb_cipher
static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_cfb8_cipher aes_cfb8_cipher
static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

# define aes_t4_cfb1_cipher aes_cfb1_cipher
static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

# define aes_t4_ctr_cipher aes_ctr_cipher
static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);

        int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
        aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                           (block128_f) aes_t4_encrypt);
            gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        /* If key set use IV, otherwise copy */
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            memcpy(gctx->iv, iv, gctx->ivlen);

# define aes_t4_gcm_cipher aes_gcm_cipher
static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);

        /* The key is two half length keys in reality */
        const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
        const int bits = bytes * 8;

        /*
         * Verify that the two keys are different.
         *
         * This addresses Rogaway's vulnerability.
         * See comment in aes_xts_init_key() below.
         */
        if ((!allow_insecure_decrypt || enc)
            && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
            EVPerr(EVP_F_AES_T4_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);

        /* key_len is two AES keys */
            aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_encrypt;
                xctx->stream = aes128_t4_xts_encrypt;
                xctx->stream = aes256_t4_xts_encrypt;

            aes_t4_set_decrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_decrypt;
                xctx->stream = aes128_t4_xts_decrypt;
                xctx->stream = aes256_t4_xts_decrypt;

        aes_t4_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aes_t4_encrypt;

        xctx->xts.key1 = &xctx->ks1;
        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);

# define aes_t4_xts_cipher aes_xts_cipher
static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);

        int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
        aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aes_t4_encrypt);
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);

# define aes_t4_ccm_cipher aes_ccm_cipher
static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# ifndef OPENSSL_NO_OCB
static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
            aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
            aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) aes_t4_encrypt,
                                    (block128_f) aes_t4_decrypt,

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
        if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
        /* If key set use IV, otherwise copy */
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
            memcpy(octx->iv, iv, octx->ivlen);

# define aes_t4_ocb_cipher aes_ocb_cipher
static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# endif                        /* OPENSSL_NO_OCB */

# ifndef OPENSSL_NO_SIV
# define aes_t4_siv_init_key aes_siv_init_key
# define aes_t4_siv_cipher aes_siv_cipher
# endif                        /* OPENSSL_NO_SIV */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_t4_##mode##_cipher, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_cipher, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_t4_##mode##_init_key, \
        aes_t4_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
#elif defined(OPENSSL_CPUID_OBJ) && defined(__s390__)

# include "s390x_arch.h"

            /*
             * KM-AES parameter block - begin
             * (see z/Architecture Principles of Operation >= SA22-7832-06)
             */
                unsigned char k[32];
            /* KM-AES parameter block - end */
} S390X_AES_ECB_CTX;

            /*
             * KMO-AES parameter block - begin
             * (see z/Architecture Principles of Operation >= SA22-7832-08)
             */
                unsigned char cv[16];
                unsigned char k[32];
            /* KMO-AES parameter block - end */
} S390X_AES_OFB_CTX;

            /*
             * KMF-AES parameter block - begin
             * (see z/Architecture Principles of Operation >= SA22-7832-08)
             */
                unsigned char cv[16];
                unsigned char k[32];
            /* KMF-AES parameter block - end */
} S390X_AES_CFB_CTX;

            /*
             * KMA-GCM-AES parameter block - begin
             * (see z/Architecture Principles of Operation >= SA22-7832-11)
             */
                unsigned char reserved[12];
                        unsigned long long g[2];
                        unsigned char b[16];
                unsigned char h[16];
                unsigned long long taadl;
                unsigned long long tpcl;
                        unsigned long long g[2];
                unsigned char k[32];
            /* KMA-GCM-AES parameter block - end */
    unsigned char ares[16];
    unsigned char mres[16];
    unsigned char kres[16];
    uint64_t tls_enc_records;   /* Number of TLS records encrypted */
} S390X_AES_GCM_CTX;

    /*
     * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
     * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
     * rounds field is used to store the function code and that the key
     * schedule is not stored (if aes hardware support is detected).
     */
    unsigned char pad[16];
            /*
             * KMAC-AES parameter block - begin
             * (see z/Architecture Principles of Operation >= SA22-7832-08)
             */
                    unsigned long long g[2];
                    unsigned char b[16];
                unsigned char k[32];
            /* KMAC-AES parameter block - end */
                unsigned long long g[2];
                unsigned char b[16];
                unsigned long long g[2];
                unsigned char b[16];
        unsigned long long blocks;
    unsigned char pad[140];
} S390X_AES_CCM_CTX;
/* Convert key size to function code: [16,24,32] -> [18,19,20]. */
# define S390X_AES_FC(keylen)  (S390X_AES_128 + ((((keylen) << 3) - 128) >> 6))
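/*
 * Worked example of the mapping above: keylen 16 gives ((128 - 128) >> 6) = 0,
 * keylen 24 gives ((192 - 128) >> 6) = 1 and keylen 32 gives
 * ((256 - 128) >> 6) = 2, i.e. the consecutive function codes S390X_AES_128,
 * S390X_AES_192 and S390X_AES_256 (18, 19 and 20).
 */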
/* Most modes of operation need km for partial block processing. */
# define S390X_aes_128_CAPABLE (OPENSSL_s390xcap_P.km[0] &      \
                                S390X_CAPBIT(S390X_AES_128))
# define S390X_aes_192_CAPABLE (OPENSSL_s390xcap_P.km[0] &      \
                                S390X_CAPBIT(S390X_AES_192))
# define S390X_aes_256_CAPABLE (OPENSSL_s390xcap_P.km[0] &      \
                                S390X_CAPBIT(S390X_AES_256))
# define s390x_aes_init_key aes_init_key
static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc);

# define S390X_aes_128_cbc_CAPABLE 1   /* checked by callee */
# define S390X_aes_192_cbc_CAPABLE 1
# define S390X_aes_256_cbc_CAPABLE 1
# define S390X_AES_CBC_CTX EVP_AES_KEY

# define s390x_aes_cbc_init_key aes_init_key

# define s390x_aes_cbc_cipher aes_cbc_cipher
static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);

# define S390X_aes_128_ecb_CAPABLE S390X_aes_128_CAPABLE
# define S390X_aes_192_ecb_CAPABLE S390X_aes_192_CAPABLE
# define S390X_aes_256_ecb_CAPABLE S390X_aes_256_CAPABLE
static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);

    cctx->fc = S390X_AES_FC(keylen);
        cctx->fc |= S390X_DECRYPT;

    memcpy(cctx->km.param.k, key, keylen);

static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);

    s390x_km(in, len, out, cctx->fc, &cctx->km.param);
# define S390X_aes_128_ofb_CAPABLE (S390X_aes_128_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmo[0] &        \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_ofb_CAPABLE (S390X_aes_192_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmo[0] &        \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_ofb_CAPABLE (S390X_aes_256_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmo[0] &        \
                                     S390X_CAPBIT(S390X_AES_256)))

static int s390x_aes_ofb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *ivec, int enc)
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
    const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);

    memcpy(cctx->kmo.param.cv, iv, ivlen);
    memcpy(cctx->kmo.param.k, key, keylen);
    cctx->fc = S390X_AES_FC(keylen);

static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);

        *out = *in ^ cctx->kmo.param.cv[n];

    len &= ~(size_t)0xf;
        s390x_kmo(in, len, out, cctx->fc, &cctx->kmo.param);

        s390x_km(cctx->kmo.param.cv, 16, cctx->kmo.param.cv, cctx->fc,
            out[n] = in[n] ^ cctx->kmo.param.cv[n];
# define S390X_aes_128_cfb_CAPABLE (S390X_aes_128_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmf[0] &        \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_cfb_CAPABLE (S390X_aes_192_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmf[0] &        \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_cfb_CAPABLE (S390X_aes_256_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmf[0] &        \
                                     S390X_CAPBIT(S390X_AES_256)))

static int s390x_aes_cfb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *ivec, int enc)
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);

    cctx->fc = S390X_AES_FC(keylen);
    cctx->fc |= 16 << 24;   /* 16 bytes cipher feedback */
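    /*
     * Added note: in the function-code word the low byte selects the AES
     * variant and, as used here, the byte at bits 24-31 carries the
     * cipher-feedback length (the KMF instruction's LCFB field) in bytes.
     * "16 << 24" therefore requests full-block (CFB-128) feedback, while the
     * CFB-8 variant further down sets "1 << 24".
     */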
        cctx->fc |= S390X_DECRYPT;

    memcpy(cctx->kmf.param.cv, iv, ivlen);
    memcpy(cctx->kmf.param.k, key, keylen);

static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);

            *out = cctx->kmf.param.cv[n] ^ tmp;
            cctx->kmf.param.cv[n] = enc ? *out : tmp;

    len &= ~(size_t)0xf;
        s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);

        s390x_km(cctx->kmf.param.cv, 16, cctx->kmf.param.cv,
                 S390X_AES_FC(keylen), cctx->kmf.param.k);
            out[n] = cctx->kmf.param.cv[n] ^ tmp;
            cctx->kmf.param.cv[n] = enc ? out[n] : tmp;
# define S390X_aes_128_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] &        \
                                     S390X_CAPBIT(S390X_AES_128))
# define S390X_aes_192_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] &        \
                                     S390X_CAPBIT(S390X_AES_192))
# define S390X_aes_256_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] &        \
                                     S390X_CAPBIT(S390X_AES_256))

static int s390x_aes_cfb8_init_key(EVP_CIPHER_CTX *ctx,
                                   const unsigned char *key,
                                   const unsigned char *ivec, int enc)
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);

    cctx->fc = S390X_AES_FC(keylen);
    cctx->fc |= 1 << 24;   /* 1 byte cipher feedback */
        cctx->fc |= S390X_DECRYPT;

    memcpy(cctx->kmf.param.cv, iv, ivlen);
    memcpy(cctx->kmf.param.k, key, keylen);

static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len)
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);

    s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
# define S390X_aes_128_cfb1_CAPABLE 0
# define S390X_aes_192_cfb1_CAPABLE 0
# define S390X_aes_256_cfb1_CAPABLE 0

# define s390x_aes_cfb1_init_key aes_init_key

# define s390x_aes_cfb1_cipher aes_cfb1_cipher
static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len);

# define S390X_aes_128_ctr_CAPABLE 1   /* checked by callee */
# define S390X_aes_192_ctr_CAPABLE 1
# define S390X_aes_256_ctr_CAPABLE 1
# define S390X_AES_CTR_CTX EVP_AES_KEY

# define s390x_aes_ctr_init_key aes_init_key

# define s390x_aes_ctr_cipher aes_ctr_cipher
static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
# define S390X_aes_128_gcm_CAPABLE (S390X_aes_128_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kma[0] &        \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_gcm_CAPABLE (S390X_aes_192_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kma[0] &        \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_gcm_CAPABLE (S390X_aes_256_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kma[0] &        \
                                     S390X_CAPBIT(S390X_AES_256)))

/* iv + padding length for iv lengths != 12 */
# define S390X_gcm_ivpadlen(i)  ((((i) + 15) >> 4 << 4) + 16)
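/*
 * Worked example: the macro rounds the IV length up to a whole 16-byte block
 * and adds one more block, which later carries the 64-bit bit-length of the
 * IV for the GHASH computation over a non-96-bit IV.  A 13-byte IV gives
 * ((13 + 15) >> 4 << 4) + 16 = 16 + 16 = 32 bytes of buffer.
 */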
/*
 * Process additional authenticated data. Returns 0 on success. Code is
 * big-endian.
 */
static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
    unsigned long long alen;

    if (ctx->kma.param.tpcl)

    alen = ctx->kma.param.taadl + len;
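    /*
     * Added note: GCM caps the AAD at 2^64 - 1 bits, i.e. 2^61 bytes; the
     * check below enforces that bound, and the "alen < len" comparison also
     * catches the 64-bit accumulator wrapping around.
     */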
    if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))

    ctx->kma.param.taadl = alen;

        ctx->ares[n] = *aad;

    /* ctx->ares contains a complete block if offset has wrapped around */
        s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;

    len &= ~(size_t)0xf;
        s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;

        ctx->ares[rem] = aad[rem];
/*
 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
 * success. Code is big-endian.
 */
static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
                         unsigned char *out, size_t len)
    const unsigned char *inptr;
    unsigned long long mlen;
        unsigned char b[16];

    mlen = ctx->kma.param.tpcl + len;
    if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))

    ctx->kma.param.tpcl = mlen;

    while (n && inlen) {
        ctx->mres[n] = *inptr;

    /* ctx->mres contains a complete block if offset has wrapped around */
        s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
                  ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;

        /* previous call already encrypted/decrypted its remainder,
         * see comment below */

    len &= ~(size_t)0xf;
        s390x_kma(ctx->ares, ctx->areslen, in, len, out,
                  ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;

        /*
         * If there is a remainder, it has to be saved such that it can be
         * processed by kma later. However, we also have to do the for-now
         * unauthenticated encryption/decryption part here and now...
         */
        if (!ctx->mreslen) {
            buf.w[0] = ctx->kma.param.j0.w[0];
            buf.w[1] = ctx->kma.param.j0.w[1];
            buf.w[2] = ctx->kma.param.j0.w[2];
            buf.w[3] = ctx->kma.param.cv.w + 1;
            s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);

        for (i = 0; i < rem; i++) {
            ctx->mres[n + i] = in[i];
            out[i] = in[i] ^ ctx->kres[n + i];

        ctx->mreslen += rem;
/*
 * Initialize context structure. Code is big-endian.
 */
static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx,
                                const unsigned char *iv)
    ctx->kma.param.t.g[0] = 0;
    ctx->kma.param.t.g[1] = 0;
    ctx->kma.param.tpcl = 0;
    ctx->kma.param.taadl = 0;

    if (ctx->ivlen == 12) {
        memcpy(&ctx->kma.param.j0, iv, ctx->ivlen);
        ctx->kma.param.j0.w[3] = 1;
        ctx->kma.param.cv.w = 1;
        /* ctx->iv has the right size and is already padded. */
        memcpy(ctx->iv, iv, ctx->ivlen);
        s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
                  ctx->fc, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;

        ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
        ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
        ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
        ctx->kma.param.t.g[0] = 0;
        ctx->kma.param.t.g[1] = 0;
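        /*
         * Added note: in GCM terms this computes the pre-counter block J0.
         * For the common 96-bit IV, J0 is simply IV || 0x00000001; for any
         * other IV length, J0 is the GHASH of the zero-padded IV and its bit
         * length, which is what the s390x_kma() call over the padded ctx->iv
         * buffer produces.
         */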
/*
 * Performs various operations on the context structure depending on control
 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
 * Code is big-endian.
 */
static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
    S390X_AES_GCM_CTX *gctx_out;
    EVP_CIPHER_CTX *out;
    unsigned char *buf, *iv;
    int ivlen, enc, len;

        ivlen = EVP_CIPHER_CTX_iv_length(c);
        iv = EVP_CIPHER_CTX_iv_noconst(c);
        gctx->ivlen = ivlen;
        gctx->tls_aad_len = -1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        iv = EVP_CIPHER_CTX_iv_noconst(c);
        len = S390X_gcm_ivpadlen(arg);

        /* Allocate memory for iv if needed. */
        if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
                OPENSSL_free(gctx->iv);

            if ((gctx->iv = OPENSSL_malloc(len)) == NULL) {
                EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);

        memset(gctx->iv + arg, 0, len - arg - 8);
        *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;

    case EVP_CTRL_AEAD_SET_TAG:
        buf = EVP_CIPHER_CTX_buf_noconst(c);
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (arg <= 0 || arg > 16 || enc)

        memcpy(buf, ptr, arg);

    case EVP_CTRL_AEAD_GET_TAG:
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)

        memcpy(ptr, gctx->kma.param.t.b, arg);

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole iv */
            memcpy(gctx->iv, ptr, gctx->ivlen);

        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8 bytes
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)

            memcpy(gctx->iv, ptr, arg);

        enc = EVP_CIPHER_CTX_encrypting(c);
        if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)

    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)

        s390x_aes_gcm_setiv(gctx, gctx->iv);

        if (arg <= 0 || arg > gctx->ivlen)

        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);

    case EVP_CTRL_GCM_SET_IV_INV:
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)

        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        s390x_aes_gcm_setiv(gctx, gctx->iv);

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the aad for later use. */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)

        buf = EVP_CIPHER_CTX_buf_noconst(c);
        memcpy(buf, ptr, arg);
        gctx->tls_aad_len = arg;
        gctx->tls_enc_records = 0;

        len = buf[arg - 2] << 8 | buf[arg - 1];
        /* Correct length for explicit iv. */
        if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)

        len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;

        /* If decrypting correct for tag too. */
        enc = EVP_CIPHER_CTX_encrypting(c);
            if (len < EVP_GCM_TLS_TAG_LEN)

            len -= EVP_GCM_TLS_TAG_LEN;

        buf[arg - 2] = len >> 8;
        buf[arg - 1] = len & 0xff;
        /* Extra padding: tag appended to record. */
        return EVP_GCM_TLS_TAG_LEN;

        gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);
        iv = EVP_CIPHER_CTX_iv_noconst(c);

        if (gctx->iv == iv) {
            gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
            len = S390X_gcm_ivpadlen(gctx->ivlen);

            if ((gctx_out->iv = OPENSSL_malloc(len)) == NULL) {
                EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);

            memcpy(gctx_out->iv, gctx->iv, len);
/*
 * Set key and/or iv. Returns 1 on success. Otherwise 0 is returned.
 */
static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);

    if (iv == NULL && key == NULL)

        keylen = EVP_CIPHER_CTX_key_length(ctx);
        memcpy(&gctx->kma.param.k, key, keylen);

        gctx->fc = S390X_AES_FC(keylen);
            gctx->fc |= S390X_DECRYPT;

        if (iv == NULL && gctx->iv_set)

            s390x_aes_gcm_setiv(gctx, iv);

        s390x_aes_gcm_setiv(gctx, iv);
            memcpy(gctx->iv, iv, gctx->ivlen);
/*
 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
 * if successful. Otherwise -1 is returned. Code is big-endian.
 */
static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                    const unsigned char *in, size_t len)
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);

    if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))

    /*
     * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
     * Requirements from SP 800-38D". The requirement is for one party to the
     * communication to fail after 2^64 - 1 keys. We do this on the encrypting
     * side only.
     */
    if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
        EVPerr(EVP_F_S390X_AES_GCM_TLS_CIPHER, EVP_R_TOO_MANY_RECORDS);

    if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN
                                     : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)

    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;

    gctx->kma.param.taadl = gctx->tls_aad_len << 3;
    gctx->kma.param.tpcl = len << 3;
    s390x_kma(buf, gctx->tls_aad_len, in, len, out,
              gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);

        memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
        if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
                          EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);

    gctx->tls_aad_len = -1;
/*
 * Called from EVP layer to initialize context, process additional
 * authenticated data, en/de-crypt plain/cipher-text and authenticate
 * ciphertext or process a TLS packet, depending on context. Returns bytes
 * written on success. Otherwise -1 is returned. Code is big-endian.
 */
static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    unsigned char *buf, tmp[16];

    if (gctx->tls_aad_len >= 0)
        return s390x_aes_gcm_tls_cipher(ctx, out, in, len);

            if (s390x_aes_gcm_aad(gctx, in, len))
            if (s390x_aes_gcm(gctx, in, out, len))

        gctx->kma.param.taadl <<= 3;
        gctx->kma.param.tpcl <<= 3;
        s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
                  gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
        /* recall that we already did en-/decrypt gctx->mres
         * and returned it to caller... */
        OPENSSL_cleanse(tmp, gctx->mreslen);

        enc = EVP_CIPHER_CTX_encrypting(ctx);
            if (gctx->taglen < 0)

            buf = EVP_CIPHER_CTX_buf_noconst(ctx);
            if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
    const unsigned char *iv;

    iv = EVP_CIPHER_CTX_iv(c);
        OPENSSL_free(gctx->iv);

    OPENSSL_cleanse(gctx, sizeof(*gctx));
# define S390X_AES_XTS_CTX EVP_AES_XTS_CTX
# define S390X_aes_128_xts_CAPABLE 1   /* checked by callee */
# define S390X_aes_256_xts_CAPABLE 1

# define s390x_aes_xts_init_key aes_xts_init_key
static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc);
# define s390x_aes_xts_cipher aes_xts_cipher
static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
# define s390x_aes_xts_ctrl aes_xts_ctrl
static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
# define s390x_aes_xts_cleanup aes_xts_cleanup

# define S390X_aes_128_ccm_CAPABLE (S390X_aes_128_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmac[0] &       \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_ccm_CAPABLE (S390X_aes_192_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmac[0] &       \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_ccm_CAPABLE (S390X_aes_256_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmac[0] &       \
                                     S390X_CAPBIT(S390X_AES_256)))
# define S390X_CCM_AAD_FLAG 0x40

/*
 * Set nonce and length fields. Code is big-endian.
 */
static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
                                       const unsigned char *nonce,
    ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
    ctx->aes.ccm.nonce.g[1] = mlen;
    memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);
/*
 * Process additional authenticated data. Code is big-endian.
 */
static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,
    ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;

    /* Suppress 'type-punned pointer dereference' warning. */
    ptr = ctx->aes.ccm.buf.b;
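    /*
     * Added note: the associated-data length is encoded as CCM specifies
     * (RFC 3610, section 2.2): lengths below 0xff00 use a plain 2-byte
     * field, lengths that fit in 32 bits use the marker 0xfffe followed by a
     * 4-byte field, and anything larger uses 0xffff followed by an 8-byte
     * field.
     */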
    if (alen < ((1 << 16) - (1 << 8))) {
        *(uint16_t *)ptr = alen;
    } else if (sizeof(alen) == 8
               && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) {
        *(uint16_t *)ptr = 0xffff;
        *(uint64_t *)(ptr + 2) = alen;
        *(uint16_t *)ptr = 0xfffe;
        *(uint32_t *)(ptr + 2) = alen;

    while (i < 16 && alen) {
        ctx->aes.ccm.buf.b[i] = *aad;
        ctx->aes.ccm.buf.b[i] = 0;

    ctx->aes.ccm.kmac_param.icv.g[0] = 0;
    ctx->aes.ccm.kmac_param.icv.g[1] = 0;
    s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc,
               &ctx->aes.ccm.kmac_param);
    ctx->aes.ccm.blocks += 2;

    alen &= ~(size_t)0xf;
        s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
        ctx->aes.ccm.blocks += alen >> 4;

        for (i = 0; i < rem; i++)
            ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i];

        s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                 ctx->aes.ccm.kmac_param.k);
        ctx->aes.ccm.blocks++;
/*
 * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for
 * success.
 */
static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in,
                         unsigned char *out, size_t len, int enc)
    unsigned int i, l, num;
    unsigned char flags;

    flags = ctx->aes.ccm.nonce.b[0];
    if (!(flags & S390X_CCM_AAD_FLAG)) {
        s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b,
                 ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k);
        ctx->aes.ccm.blocks++;

    ctx->aes.ccm.nonce.b[0] = l;

    /*
     * Reconstruct length from encoded length field
     * and initialize it with counter value.
     */
    for (i = 15 - l; i < 15; i++) {
        n |= ctx->aes.ccm.nonce.b[i];
        ctx->aes.ccm.nonce.b[i] = 0;
    n |= ctx->aes.ccm.nonce.b[15];
    ctx->aes.ccm.nonce.b[15] = 1;

        return -1;                 /* length mismatch */

    /* Two operations per block plus one for tag encryption */
    ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1;
    if (ctx->aes.ccm.blocks > (1ULL << 61))
        return -2;                 /* too much data */

    len &= ~(size_t)0xf;

        /* mac-then-encrypt */
        s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
            for (i = 0; i < rem; i++)
                ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i];
            s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                     ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                     ctx->aes.ccm.kmac_param.k);

        CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
                                    ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
                                    &num, (ctr128_f)AES_ctr32_encrypt);
        /* decrypt-then-mac */
        CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
                                    ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
                                    &num, (ctr128_f)AES_ctr32_encrypt);

        s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
            for (i = 0; i < rem; i++)
                ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i];
            s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                     ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                     ctx->aes.ccm.kmac_param.k);

    for (i = 15 - l; i < 16; i++)
        ctx->aes.ccm.nonce.b[i] = 0;

    s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc,
             ctx->aes.ccm.kmac_param.k);
    ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0];
    ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1];

    ctx->aes.ccm.nonce.b[0] = flags;   /* restore flags field */
/*
 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
 * if successful. Otherwise -1 is returned.
 */
static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                    const unsigned char *in, size_t len)
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    unsigned char *ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
    unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);

        || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m))

        /* Set explicit iv (sequence number). */
        memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN);

    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;

    /*
     * Get explicit iv (sequence number). We already have fixed iv
     * (server/client_write_iv) here.
     */
    memcpy(ivec + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN);
    s390x_aes_ccm_setiv(cctx, ivec, len);

    /* Process aad (sequence number|type|version|length) */
    s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len);

    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;

        if (s390x_aes_ccm(cctx, in, out, len, enc))

        memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
        if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
            if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len,

        OPENSSL_cleanse(out, len);
/*
 * Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is
 * returned.
 */
static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    unsigned char *ivec;

    if (iv == NULL && key == NULL)

        keylen = EVP_CIPHER_CTX_key_length(ctx);
        cctx->aes.ccm.fc = S390X_AES_FC(keylen);
        memcpy(cctx->aes.ccm.kmac_param.k, key, keylen);

        /* Store encoded m and l. */
        cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7)
                                 | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3;
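        /*
         * Added note: this is the CCM flags octet from RFC 3610 - bits 0-2
         * hold L - 1 (the length-field size minus one) and bits 3-5 hold
         * (M - 2) / 2 (the encoded tag length).  The associated-data bit
         * (S390X_CCM_AAD_FLAG, 0x40) is OR-ed in later, when AAD is actually
         * processed.
         */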
2230 memset(cctx
->aes
.ccm
.nonce
.b
+ 1, 0,
2231 sizeof(cctx
->aes
.ccm
.nonce
.b
));
2232 cctx
->aes
.ccm
.blocks
= 0;
2234 cctx
->aes
.ccm
.key_set
= 1;
2238 ivec
= EVP_CIPHER_CTX_iv_noconst(ctx
);
2239 memcpy(ivec
, iv
, 15 - cctx
->aes
.ccm
.l
);
2241 cctx
->aes
.ccm
.iv_set
= 1;
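/*
 * Illustrative note (not part of the implementation): with the defaults used
 * by this file's CCM code, l = 8 and m = 12, the encoded byte stored in
 * nonce.b[0] above works out as
 *
 *     ((8 - 1) & 0x7) | (((12 - 2) >> 1) & 0x7) << 3  ==  0x07 | 0x28  ==  0x2f
 *
 * i.e. the RFC 3610 B_0 flags value 8*M' + L' (with M' = (M - 2)/2 and
 * L' = L - 1) before the Adata bit is OR-ed in.
 */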
/*-
 * Called from EVP layer to initialize context, process additional
 * authenticated data, en/de-crypt plain/cipher-text and authenticate
 * plaintext or process a TLS packet, depending on context. Returns bytes
 * written on success. Otherwise -1 is returned.
 */
static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);
    int rv;
    unsigned char *buf, *ivec;

    if (!cctx->aes.ccm.key_set)
        return -1;

    if (cctx->aes.ccm.tls_aad_len >= 0)
        return s390x_aes_ccm_tls_cipher(ctx, out, in, len);

    /*-
     * Final(): Does not return any data. Recall that ccm is mac-then-encrypt
     * so integrity must be checked already at Update() i.e., before
     * potentially corrupted data is output.
     */
    if (in == NULL && out != NULL)
        return 0;

    if (!cctx->aes.ccm.iv_set)
        return -1;

    if (out == NULL) {
        /* Update(): Pass message length. */
        if (in == NULL) {
            ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
            s390x_aes_ccm_setiv(cctx, ivec, len);

            cctx->aes.ccm.len_set = 1;
            return len;
        }

        /* Update(): Process aad. */
        if (!cctx->aes.ccm.len_set && len)
            return -1;

        s390x_aes_ccm_aad(cctx, in, len);
        return len;
    }

    /* The tag must be set before actually decrypting data */
    if (!enc && !cctx->aes.ccm.tag_set)
        return -1;

    /* Update(): Process message. */
    if (!cctx->aes.ccm.len_set) {
        /*-
         * In case message length was not previously set explicitly via
         * Update(), set it now.
         */
        ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
        s390x_aes_ccm_setiv(cctx, ivec, len);

        cctx->aes.ccm.len_set = 1;
    }

    if (enc) {
        if (s390x_aes_ccm(cctx, in, out, len, enc))
            return -1;

        cctx->aes.ccm.tag_set = 1;
        return len;
    } else {
        rv = -1;

        if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
            buf = EVP_CIPHER_CTX_buf_noconst(ctx);
            if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf,
                               cctx->aes.ccm.m))
                rv = len;
        }

        if (rv == -1)
            OPENSSL_cleanse(out, len);

        cctx->aes.ccm.iv_set = 0;
        cctx->aes.ccm.tag_set = 0;
        cctx->aes.ccm.len_set = 0;
        return rv;
    }
}
/*-
 * Performs various operations on the context structure depending on control
 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
 * Code is big-endian.
 */
static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c);
    unsigned char *buf, *iv;
    int enc, len;

    switch (type) {
    case EVP_CTRL_INIT:
        cctx->aes.ccm.key_set = 0;
        cctx->aes.ccm.iv_set = 0;
        cctx->aes.ccm.l = 8;
        cctx->aes.ccm.m = 12;
        cctx->aes.ccm.tag_set = 0;
        cctx->aes.ccm.len_set = 0;
        cctx->aes.ccm.tls_aad_len = -1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;

        /* Save the aad for later use. */
        buf = EVP_CIPHER_CTX_buf_noconst(c);
        memcpy(buf, ptr, arg);
        cctx->aes.ccm.tls_aad_len = arg;

        len = buf[arg - 2] << 8 | buf[arg - 1];
        if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
            return 0;

        /* Correct length for explicit iv. */
        len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;

        enc = EVP_CIPHER_CTX_encrypting(c);
        if (!enc) {
            if (len < cctx->aes.ccm.m)
                return 0;

            /* Correct length for tag. */
            len -= cctx->aes.ccm.m;
        }

        buf[arg - 2] = len >> 8;
        buf[arg - 1] = len & 0xff;

        /* Extra padding: tag appended to record. */
        return cctx->aes.ccm.m;

    case EVP_CTRL_CCM_SET_IV_FIXED:
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
            return 0;

        /* Copy to first part of the iv. */
        iv = EVP_CIPHER_CTX_iv_noconst(c);
        memcpy(iv, ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        arg = 15 - arg;
        /* fall-through */

    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
            return 0;

        cctx->aes.ccm.l = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;

        enc = EVP_CIPHER_CTX_encrypting(c);
        if (enc && ptr)
            return 0;

        if (ptr) {
            cctx->aes.ccm.tag_set = 1;
            buf = EVP_CIPHER_CTX_buf_noconst(c);
            memcpy(buf, ptr, arg);
        }

        cctx->aes.ccm.m = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (!enc || !cctx->aes.ccm.tag_set)
            return 0;

        if(arg < cctx->aes.ccm.m)
            return 0;

        memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
        cctx->aes.ccm.tag_set = 0;
        cctx->aes.ccm.iv_set = 0;
        cctx->aes.ccm.len_set = 0;
        return 1;

    default:
        return -1;
    }
}
# define s390x_aes_ccm_cleanup aes_ccm_cleanup

# ifndef OPENSSL_NO_OCB
#  define S390X_AES_OCB_CTX             EVP_AES_OCB_CTX
#  define S390X_aes_128_ocb_CAPABLE     0
#  define S390X_aes_192_ocb_CAPABLE     0
#  define S390X_aes_256_ocb_CAPABLE     0

#  define s390x_aes_ocb_init_key aes_ocb_init_key
static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                                  const unsigned char *iv, int enc);
#  define s390x_aes_ocb_cipher aes_ocb_cipher
static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
#  define s390x_aes_ocb_cleanup aes_ocb_cleanup
static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *);
#  define s390x_aes_ocb_ctrl aes_ocb_ctrl
static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
# endif

# ifndef OPENSSL_NO_SIV
#  define S390X_AES_SIV_CTX             EVP_AES_SIV_CTX
#  define S390X_aes_128_siv_CAPABLE     0
#  define S390X_aes_192_siv_CAPABLE     0
#  define S390X_aes_256_siv_CAPABLE     0

#  define s390x_aes_siv_init_key aes_siv_init_key
#  define s390x_aes_siv_cipher aes_siv_cipher
#  define s390x_aes_siv_cleanup aes_siv_cleanup
#  define s390x_aes_siv_ctrl aes_siv_ctrl
# endif
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode, \
                              MODE,flags) \
static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
    nid##_##keylen##_##nmode,blocksize, \
    keylen / 8, \
    ivlen, \
    flags | EVP_CIPH_##MODE##_MODE, \
    s390x_aes_##mode##_init_key, \
    s390x_aes_##mode##_cipher, \
    NULL, \
    sizeof(S390X_AES_##MODE##_CTX), \
    NULL, NULL, NULL, NULL \
}; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
    nid##_##keylen##_##nmode, \
    blocksize, \
    keylen / 8, \
    ivlen, \
    flags | EVP_CIPH_##MODE##_MODE, \
    aes_init_key, \
    aes_##mode##_cipher, \
    NULL, \
    sizeof(EVP_AES_KEY), \
    NULL, NULL, NULL, NULL \
}; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ \
    return S390X_aes_##keylen##_##mode##_CAPABLE ? \
           &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
}

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\
static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
    nid##_##keylen##_##mode, \
    blocksize, \
    (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
    ivlen, \
    flags | EVP_CIPH_##MODE##_MODE, \
    s390x_aes_##mode##_init_key, \
    s390x_aes_##mode##_cipher, \
    s390x_aes_##mode##_cleanup, \
    sizeof(S390X_AES_##MODE##_CTX), \
    NULL, NULL, \
    s390x_aes_##mode##_ctrl, \
    NULL \
}; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
    nid##_##keylen##_##mode,blocksize, \
    (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
    ivlen, \
    flags | EVP_CIPH_##MODE##_MODE, \
    aes_##mode##_init_key, \
    aes_##mode##_cipher, \
    aes_##mode##_cleanup, \
    sizeof(EVP_AES_##MODE##_CTX), \
    NULL, NULL, \
    aes_##mode##_ctrl, \
    NULL \
}; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ \
    return S390X_aes_##keylen##_##mode##_CAPABLE ? \
           &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
}

#else

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_init_key,                   \
        aes_##mode##_cipher,            \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen,                          \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_##mode##_init_key,          \
        aes_##mode##_cipher,            \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }

#endif
#if defined(OPENSSL_CPUID_OBJ) && (defined(__arm__) || defined(__arm) || defined(__aarch64__))
# include "arm_arch.h"
# if __ARM_MAX_ARCH__>=7
#  if defined(BSAES_ASM)
#   define BSAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
#  endif
#  if defined(VPAES_ASM)
#   define VPAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
#  endif
#  define HWAES_CAPABLE (OPENSSL_armcap_P & ARMV8_AES)
#  define HWAES_set_encrypt_key aes_v8_set_encrypt_key
#  define HWAES_set_decrypt_key aes_v8_set_decrypt_key
#  define HWAES_encrypt aes_v8_encrypt
#  define HWAES_decrypt aes_v8_decrypt
#  define HWAES_cbc_encrypt aes_v8_cbc_encrypt
#  define HWAES_ctr32_encrypt_blocks aes_v8_ctr32_encrypt_blocks
# endif
#endif

#if defined(HWAES_CAPABLE)
int HWAES_set_encrypt_key(const unsigned char *userKey, const int bits,
                          AES_KEY *key);
int HWAES_set_decrypt_key(const unsigned char *userKey, const int bits,
                          AES_KEY *key);
void HWAES_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void HWAES_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void HWAES_cbc_encrypt(const unsigned char *in, unsigned char *out,
                       size_t length, const AES_KEY *key,
                       unsigned char *ivec, const int enc);
void HWAES_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
                                size_t len, const AES_KEY *key,
                                const unsigned char ivec[16]);
void HWAES_xts_encrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
void HWAES_xts_decrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
#endif

#define BLOCK_CIPHER_generic_pack(nid,keylen,flags)             \
        BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)     \
        BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)      \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)   \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)   \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags)       \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags)       \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                        const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
#ifdef HWAES_CAPABLE
        if (HWAES_CAPABLE) {
            ret = HWAES_set_decrypt_key(key,
                                        EVP_CIPHER_CTX_key_length(ctx) * 8,
                                        &dat->ks.ks);
            dat->block = (block128_f) HWAES_decrypt;
            dat->stream.cbc = NULL;
# ifdef HWAES_cbc_encrypt
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
# endif
        } else
#endif
#ifdef BSAES_CAPABLE
        if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
            ret = AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &dat->ks.ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
        } else
#endif
#ifdef VPAES_CAPABLE
        if (VPAES_CAPABLE) {
            ret = vpaes_set_decrypt_key(key,
                                        EVP_CIPHER_CTX_key_length(ctx) * 8,
                                        &dat->ks.ks);
            dat->block = (block128_f) vpaes_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) vpaes_cbc_encrypt : NULL;
        } else
#endif
        {
            ret = AES_set_decrypt_key(key,
                                      EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &dat->ks.ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) AES_cbc_encrypt : NULL;
        }
    } else
#ifdef HWAES_CAPABLE
    if (HWAES_CAPABLE) {
        ret = HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) HWAES_encrypt;
        dat->stream.cbc = NULL;
# ifdef HWAES_cbc_encrypt
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
        else
# endif
# ifdef HWAES_ctr32_encrypt_blocks
        if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
        else
# endif
            (void)0;            /* terminate potentially open 'else' */
    } else
#endif
#ifdef BSAES_CAPABLE
    if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
        ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &dat->ks.ks);
        dat->block = (block128_f) AES_encrypt;
        dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
    } else
#endif
#ifdef VPAES_CAPABLE
    if (VPAES_CAPABLE) {
        ret = vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) vpaes_encrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) vpaes_cbc_encrypt : NULL;
    } else
#endif
    {
        ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &dat->ks.ks);
        dat->block = (block128_f) AES_encrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) AES_cbc_encrypt : NULL;
        if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
    }

    if (ret < 0) {
        EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}
static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    if (dat->stream.cbc)
        (*dat->stream.cbc) (in, out, len, &dat->ks,
                            EVP_CIPHER_CTX_iv_noconst(ctx),
                            EVP_CIPHER_CTX_encrypting(ctx));
    else if (EVP_CIPHER_CTX_encrypting(ctx))
        CRYPTO_cbc128_encrypt(in, out, len, &dat->ks,
                              EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
    else
        CRYPTO_cbc128_decrypt(in, out, len, &dat->ks,
                              EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);

    return 1;
}
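/*
 * Illustrative sketch (not part of this file): the CBC handler above is
 * normally reached through the generic EVP interface. A minimal caller-side
 * example, assuming hypothetical key (16 bytes), iv (16 bytes) and msg/out
 * buffers, with error checking omitted:
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     int outl, tmpl;
 *
 *     EVP_EncryptInit_ex(c, EVP_aes_128_cbc(), NULL, key, iv);
 *     EVP_EncryptUpdate(c, out, &outl, msg, msg_len);
 *     EVP_EncryptFinal_ex(c, out + outl, &tmpl);
 *     EVP_CIPHER_CTX_free(c);
 */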
static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    size_t bl = EVP_CIPHER_CTX_block_size(ctx);
    size_t i;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    if (len < bl)
        return 1;

    for (i = 0, len -= bl; i <= len; i += bl)
        (*dat->block) (in + i, out + i, &dat->ks);

    return 1;
}
static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    int num = EVP_CIPHER_CTX_num(ctx);
    CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
                          EVP_CIPHER_CTX_iv_noconst(ctx), &num, dat->block);
    EVP_CIPHER_CTX_set_num(ctx, num);
    return 1;
}
static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    int num = EVP_CIPHER_CTX_num(ctx);
    CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
                          EVP_CIPHER_CTX_iv_noconst(ctx), &num,
                          EVP_CIPHER_CTX_encrypting(ctx), dat->block);
    EVP_CIPHER_CTX_set_num(ctx, num);
    return 1;
}
static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    int num = EVP_CIPHER_CTX_num(ctx);
    CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
                            EVP_CIPHER_CTX_iv_noconst(ctx), &num,
                            EVP_CIPHER_CTX_encrypting(ctx), dat->block);
    EVP_CIPHER_CTX_set_num(ctx, num);
    return 1;
}
static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
        int num = EVP_CIPHER_CTX_num(ctx);
        CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
                                EVP_CIPHER_CTX_iv_noconst(ctx), &num,
                                EVP_CIPHER_CTX_encrypting(ctx), dat->block);
        EVP_CIPHER_CTX_set_num(ctx, num);
        return 1;
    }

    while (len >= MAXBITCHUNK) {
        int num = EVP_CIPHER_CTX_num(ctx);
        CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
                                EVP_CIPHER_CTX_iv_noconst(ctx), &num,
                                EVP_CIPHER_CTX_encrypting(ctx), dat->block);
        EVP_CIPHER_CTX_set_num(ctx, num);
        len -= MAXBITCHUNK;
        out += MAXBITCHUNK;
        in += MAXBITCHUNK;
    }
    if (len) {
        int num = EVP_CIPHER_CTX_num(ctx);
        CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
                                EVP_CIPHER_CTX_iv_noconst(ctx), &num,
                                EVP_CIPHER_CTX_encrypting(ctx), dat->block);
        EVP_CIPHER_CTX_set_num(ctx, num);
    }

    return 1;
}
static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    unsigned int num = EVP_CIPHER_CTX_num(ctx);
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    if (dat->stream.ctr)
        CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
                                    EVP_CIPHER_CTX_iv_noconst(ctx),
                                    EVP_CIPHER_CTX_buf_noconst(ctx),
                                    &num, dat->stream.ctr);
    else
        CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
                              EVP_CIPHER_CTX_iv_noconst(ctx),
                              EVP_CIPHER_CTX_buf_noconst(ctx), &num,
                              dat->block);
    EVP_CIPHER_CTX_set_num(ctx, num);
    return 1;
}

BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);

    if (gctx == NULL)
        return 0;
    OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
    if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
        OPENSSL_free(gctx->iv);
    return 1;
}
static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);

    switch (type) {
    case EVP_CTRL_INIT:
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = c->cipher->iv_len;
        gctx->iv = c->iv;
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        if (arg <= 0)
            return 0;
        /* Allocate memory for IV if needed */
        if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
            if (gctx->iv != c->iv)
                OPENSSL_free(gctx->iv);
            if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
                EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                return 0;
            }
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if (arg <= 0 || arg > 16 || c->encrypt)
            return 0;
        memcpy(c->buf, ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (arg <= 0 || arg > 16 || !c->encrypt
            || gctx->taglen < 0)
            return 0;
        memcpy(ptr, c->buf, arg);
        return 1;

    case EVP_CTRL_GET_IV:
        if (gctx->iv_gen != 1 && gctx->iv_gen_rand != 1)
            return 0;
        if (gctx->ivlen != arg)
            return 0;
        memcpy(ptr, gctx->iv, arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole IV */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;
        if (arg)
            memcpy(gctx->iv, ptr, arg);
        if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;
        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
            return 0;
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(c->buf, ptr, arg);
        gctx->tls_aad_len = arg;
        gctx->tls_enc_records = 0;
        {
            unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!c->encrypt) {
                if (len < EVP_GCM_TLS_TAG_LEN)
                    return 0;
                len -= EVP_GCM_TLS_TAG_LEN;
            }
            c->buf[arg - 2] = len >> 8;
            c->buf[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out);
            if (gctx->gcm.key) {
                if (gctx->gcm.key != &gctx->ks)
                    return 0;
                gctx_out->gcm.key = &gctx_out->ks;
            }
            if (gctx->iv == c->iv)
                gctx_out->iv = out->iv;
            else {
                if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
                    EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                    return 0;
                }
                memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
            }
            return 1;
        }

    default:
        return -1;

    }
}
static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);

    if (!iv && !key)
        return 1;
    if (key) {
        do {
#ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) HWAES_encrypt);
# ifdef HWAES_ctr32_encrypt_blocks
                gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
# else
                gctx->ctr = NULL;
# endif
                break;
            } else
#endif
#ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE) {
                AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) AES_encrypt);
                gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
                break;
            } else
#endif
#ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) vpaes_encrypt);
                gctx->ctr = NULL;
                break;
            } else
#endif
                (void)0;        /* terminate potentially open 'else' */

            AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
            CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                               (block128_f) AES_encrypt);
#ifdef AES_CTR_ASM
            gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
#else
            gctx->ctr = NULL;
#endif
        } while (0);

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}
/*
 * Handle TLS GCM packet format. This consists of the last portion of the IV
 * followed by the payload and finally the tag. On encrypt generate IV,
 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload
 * and verify tag.
 */
static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    int rv = -1;

    /* Encrypt/decrypt must be performed in place */
    if (out != in
        || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;

    /*
     * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
     * Requirements from SP 800-38D". The requirement is for one party to the
     * communication to fail after 2^64 - 1 keys. We do this on the encrypting
     * side only.
     */
    if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
        EVPerr(EVP_F_AES_GCM_TLS_CIPHER, EVP_R_TOO_MANY_RECORDS);
        goto err;
    }

    /*
     * Set IV from start of buffer or generate IV and write to start of
     * buffer.
     */
    if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ? EVP_CTRL_GCM_IV_GEN
                                              : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;
    /* Use saved AAD */
    if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
        goto err;
    /* Fix buffer and length to point to payload */
    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    if (ctx->encrypt) {
        /* Encrypt payload */
        if (gctx->ctr) {
            size_t bulk = 0;
#if defined(AES_GCM_ASM)
            if (len >= 32 && AES_GCM_ASM(gctx)) {
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
                    return 0;

                bulk = AES_gcm_encrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                            in + bulk, out + bulk,
                                            len - bulk, gctx->ctr))
                goto err;
        } else {
            size_t bulk = 0;
#if defined(AES_GCM_ASM2)
            if (len >= 32 && AES_GCM_ASM2(gctx)) {
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
                    return 0;

                bulk = AES_gcm_encrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_encrypt(&gctx->gcm,
                                      in + bulk, out + bulk, len - bulk))
                goto err;
        }
        out += len;
        /* Finally write tag */
        CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        /* Decrypt */
        if (gctx->ctr) {
            size_t bulk = 0;
#if defined(AES_GCM_ASM)
            if (len >= 16 && AES_GCM_ASM(gctx)) {
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
                    return 0;

                bulk = AES_gcm_decrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                            in + bulk, out + bulk,
                                            len - bulk, gctx->ctr))
                goto err;
        } else {
            size_t bulk = 0;
#if defined(AES_GCM_ASM2)
            if (len >= 16 && AES_GCM_ASM2(gctx)) {
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
                    return 0;

                bulk = AES_gcm_decrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_decrypt(&gctx->gcm,
                                      in + bulk, out + bulk, len - bulk))
                goto err;
        }
        /* Retrieve tag */
        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
        /* If tag mismatch wipe buffer */
        if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }

 err:
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}
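/*
 * Illustrative layout sketch (not part of the implementation): the in-place
 * TLS record buffer handled above, using the lengths referenced in the code
 * (EVP_GCM_TLS_EXPLICIT_IV_LEN is 8, EVP_GCM_TLS_TAG_LEN is 16):
 *
 *     |<-- 8: explicit IV -->|<------ len: payload ------>|<-- 16: tag -->|
 *
 * On encrypt the explicit IV is generated and written to the front and the
 * tag appended; on decrypt both are consumed from the same positions.
 */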
#ifdef FIPS_MODE
/*
 * See SP800-38D (GCM) Section 8 "Uniqueness requirement on IVS and keys"
 *
 * See also 8.2.2 RBG-based construction.
 * Random construction consists of a free field (which can be NULL) and a
 * random field which will use a DRBG that can return at least 96 bits of
 * entropy strength. (The DRBG must be seeded by the FIPS module).
 */
static int aes_gcm_iv_generate(EVP_AES_GCM_CTX *gctx, int offset)
{
    int sz = gctx->ivlen - offset;

    /* Must be at least 96 bits */
    if (sz <= 0 || gctx->ivlen < 12)
        return 0;

    /* Use DRBG to generate random iv */
    if (RAND_bytes(gctx->iv + offset, sz) <= 0)
        return 0;
    return 1;
}
#endif /* FIPS_MODE */
static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);

    /* If not set up, return error */
    if (!gctx->key_set)
        return -1;

    if (gctx->tls_aad_len >= 0)
        return aes_gcm_tls_cipher(ctx, out, in, len);

#ifdef FIPS_MODE
    /*
     * FIPS requires generation of AES-GCM IV's inside the FIPS module.
     * The IV can still be set externally (the security policy will state that
     * this is not FIPS compliant). There are some applications
     * where setting the IV externally is the only option available.
     */
    if (!gctx->iv_set) {
        if (!ctx->encrypt || !aes_gcm_iv_generate(gctx, 0))
            return -1;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen_rand = 1;
    }
#else
    if (!gctx->iv_set)
        return -1;
#endif /* FIPS_MODE */

    if (in) {
        if (out == NULL) {
            if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
                return -1;
        } else if (ctx->encrypt) {
            if (gctx->ctr) {
                size_t bulk = 0;
#if defined(AES_GCM_ASM)
                if (len >= 32 && AES_GCM_ASM(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_encrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key, gctx->gcm.Yi.c,
                                           gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                                in + bulk, out + bulk,
                                                len - bulk, gctx->ctr))
                    return -1;
            } else {
                size_t bulk = 0;
#if defined(AES_GCM_ASM2)
                if (len >= 32 && AES_GCM_ASM2(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_encrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key, gctx->gcm.Yi.c,
                                           gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_encrypt(&gctx->gcm,
                                          in + bulk, out + bulk, len - bulk))
                    return -1;
            }
        } else {
            if (gctx->ctr) {
                size_t bulk = 0;
#if defined(AES_GCM_ASM)
                if (len >= 16 && AES_GCM_ASM(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_decrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key,
                                           gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                                in + bulk, out + bulk,
                                                len - bulk, gctx->ctr))
                    return -1;
            } else {
                size_t bulk = 0;
#if defined(AES_GCM_ASM2)
                if (len >= 16 && AES_GCM_ASM2(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_decrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key,
                                           gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_decrypt(&gctx->gcm,
                                          in + bulk, out + bulk, len - bulk))
                    return -1;
            }
        }
        return len;
    } else {
        if (!ctx->encrypt) {
            if (gctx->taglen < 0)
                return -1;
            if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
                return -1;
            gctx->iv_set = 0;
            return 0;
        }
        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
        gctx->taglen = 16;
        /* Don't reuse the IV */
        gctx->iv_set = 0;
        return 0;
    }
}
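/*
 * Illustrative sketch (not part of this file): the non-TLS code path above
 * corresponds to the usual EVP AEAD calling sequence. Encrypt-side example,
 * assuming hypothetical key (32 bytes), iv (12 bytes), aad, pt and ct
 * buffers, with error checking omitted. The length-only update with a NULL
 * output buffer is the AAD pass:
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     unsigned char tag[16];
 *     int outl;
 *
 *     EVP_EncryptInit_ex(c, EVP_aes_256_gcm(), NULL, NULL, NULL);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL);
 *     EVP_EncryptInit_ex(c, NULL, NULL, key, iv);
 *     EVP_EncryptUpdate(c, NULL, &outl, aad, aad_len);
 *     EVP_EncryptUpdate(c, ct, &outl, pt, pt_len);
 *     EVP_EncryptFinal_ex(c, ct + outl, &outl);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, sizeof(tag), tag);
 *     EVP_CIPHER_CTX_free(c);
 */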
#define CUSTOM_FLAGS    (EVP_CIPH_FLAG_DEFAULT_ASN1 \
                | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                | EVP_CIPH_CUSTOM_COPY)

BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, c);

    if (type == EVP_CTRL_COPY) {
        EVP_CIPHER_CTX *out = ptr;
        EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out);

        if (xctx->xts.key1) {
            if (xctx->xts.key1 != &xctx->ks1)
                return 0;
            xctx_out->xts.key1 = &xctx_out->ks1;
        }
        if (xctx->xts.key2) {
            if (xctx->xts.key2 != &xctx->ks2)
                return 0;
            xctx_out->xts.key2 = &xctx_out->ks2;
        }
        return 1;
    } else if (type != EVP_CTRL_INIT)
        return -1;
    /* key1 and key2 are used as an indicator both key and IV are set */
    xctx->xts.key1 = NULL;
    xctx->xts.key2 = NULL;
    return 1;
}
static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);

    if (!iv && !key)
        return 1;

    if (key) {
        do {
            /* The key is two half length keys in reality */
            const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
            const int bits = bytes * 8;

            /*
             * Verify that the two keys are different.
             *
             * This addresses the vulnerability described in Rogaway's
             * September 2004 paper:
             *
             *      "Efficient Instantiations of Tweakable Blockciphers and
             *       Refinements to Modes OCB and PMAC".
             *      (http://web.cs.ucdavis.edu/~rogaway/papers/offsets.pdf)
             *
             * FIPS 140-2 IG A.9 XTS-AES Key Generation Requirements states
             * that:
             *      "The check for Key_1 != Key_2 shall be done at any place
             *       BEFORE using the keys in the XTS-AES algorithm to process
             *       data with them."
             */
            if ((!allow_insecure_decrypt || enc)
                    && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
                EVPerr(EVP_F_AES_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
                return 0;
            }

#ifdef AES_XTS_ASM
            xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
#else
            xctx->stream = NULL;
#endif
            /* key_len is two AES keys */
#ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                if (enc) {
                    HWAES_set_encrypt_key(key, bits, &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) HWAES_encrypt;
# ifdef HWAES_xts_encrypt
                    xctx->stream = HWAES_xts_encrypt;
# endif
                } else {
                    HWAES_set_decrypt_key(key, bits, &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) HWAES_decrypt;
# ifdef HWAES_xts_decrypt
                    xctx->stream = HWAES_xts_decrypt;
# endif
                }

                HWAES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
                xctx->xts.block2 = (block128_f) HWAES_encrypt;

                xctx->xts.key1 = &xctx->ks1;
                break;
            } else
#endif
#ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE)
                xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
            else
#endif
#ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                if (enc) {
                    vpaes_set_encrypt_key(key, bits, &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) vpaes_encrypt;
                } else {
                    vpaes_set_decrypt_key(key, bits, &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) vpaes_decrypt;
                }

                vpaes_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
                xctx->xts.block2 = (block128_f) vpaes_encrypt;

                xctx->xts.key1 = &xctx->ks1;
                break;
            } else
#endif
                (void)0;        /* terminate potentially open 'else' */

            if (enc) {
                AES_set_encrypt_key(key, bits, &xctx->ks1.ks);
                xctx->xts.block1 = (block128_f) AES_encrypt;
            } else {
                AES_set_decrypt_key(key, bits, &xctx->ks1.ks);
                xctx->xts.block1 = (block128_f) AES_decrypt;
            }

            AES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
            xctx->xts.block2 = (block128_f) AES_encrypt;

            xctx->xts.key1 = &xctx->ks1;
        } while (0);
    }

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
    }

    return 1;
}
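/*
 * Illustrative sketch (not part of this file): the EVP-level XTS key is the
 * concatenation Key_1 || Key_2 that the code above splits in half, so
 * EVP_aes_256_xts() expects 64 bytes of key material (EVP_aes_128_xts()
 * expects 32). Minimal caller-side example with hypothetical key64, tweak16
 * and in/out buffers, error checks omitted; identical halves are rejected by
 * the CRYPTO_memcmp() check above:
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     int outl;
 *
 *     EVP_EncryptInit_ex(c, EVP_aes_256_xts(), NULL, key64, tweak16);
 *     EVP_EncryptUpdate(c, out, &outl, in, in_len);
 *     EVP_CIPHER_CTX_free(c);
 */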
static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);

    if (xctx->xts.key1 == NULL
            || xctx->xts.key2 == NULL
            || out == NULL
            || in == NULL
            || len < AES_BLOCK_SIZE)
        return 0;

    /*
     * Impose a limit of 2^20 blocks per data unit as specified by
     * IEEE Std 1619-2018.  The earlier and obsolete IEEE Std 1619-2007
     * indicated that this was a SHOULD NOT rather than a MUST NOT.
     * NIST SP 800-38E mandates the same limit.
     */
    if (len > XTS_MAX_BLOCKS_PER_DATA_UNIT * AES_BLOCK_SIZE) {
        EVPerr(EVP_F_AES_XTS_CIPHER, EVP_R_XTS_DATA_UNIT_IS_TOO_LARGE);
        return 0;
    }

    if (xctx->stream)
        (*xctx->stream) (in, out, len,
                         xctx->xts.key1, xctx->xts.key2,
                         EVP_CIPHER_CTX_iv_noconst(ctx));
    else if (CRYPTO_xts128_encrypt(&xctx->xts, EVP_CIPHER_CTX_iv_noconst(ctx),
                                   in, out, len,
                                   EVP_CIPHER_CTX_encrypting(ctx)))
        return 0;
    return 1;
}
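/*
 * Illustrative note (not part of the implementation): per the comment above,
 * the limit amounts to 2^20 blocks of AES_BLOCK_SIZE (16) bytes, i.e.
 *
 *     (1 << 20) * 16 = 16,777,216 bytes = 16 MiB
 *
 * per data unit, which in EVP terms means per single update call processed
 * with a given tweak.
 */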
#define aes_xts_cleanup NULL

#define XTS_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
                   | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                   | EVP_CIPH_CUSTOM_COPY)

BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c);

    switch (type) {
    case EVP_CTRL_INIT:
        cctx->key_set = 0;
        cctx->iv_set = 0;
        cctx->L = 8;
        cctx->M = 12;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        cctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        cctx->tls_aad_len = arg;
        {
            uint16_t len =
                EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!EVP_CIPHER_CTX_encrypting(c)) {
                if (len < cctx->M)
                    return 0;
                len -= cctx->M;
            }
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return cctx->M;

    case EVP_CTRL_CCM_SET_IV_FIXED:
        /* Sanity check length */
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
            return 0;

        /* Just copy to first part of IV */
        memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        arg = 15 - arg;
        /* fall-through */

    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
            return 0;
        cctx->L = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;
        if (EVP_CIPHER_CTX_encrypting(c) && ptr)
            return 0;
        if (ptr) {
            cctx->tag_set = 1;
            memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        }
        cctx->M = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
            return 0;
        if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
            return 0;
        cctx->tag_set = 0;
        cctx->iv_set = 0;
        cctx->len_set = 0;
        return 1;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out);
            if (cctx->ccm.key) {
                if (cctx->ccm.key != &cctx->ks)
                    return 0;
                cctx_out->ccm.key = &cctx_out->ks;
            }
            return 1;
        }

    default:
        return -1;
    }
}
static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);

    if (!iv && !key)
        return 1;
    if (key)
        do {
#ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &cctx->ks.ks);

                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                                   &cctx->ks, (block128_f) HWAES_encrypt);
                cctx->str = NULL;
                cctx->key_set = 1;
                break;
            } else
#endif
#ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &cctx->ks.ks);
                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                                   &cctx->ks, (block128_f) vpaes_encrypt);
                cctx->str = NULL;
                cctx->key_set = 1;
                break;
            }
#endif
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &cctx->ks.ks);
            CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                               &cctx->ks, (block128_f) AES_encrypt);
            cctx->str = NULL;
            cctx->key_set = 1;
        } while (0);
    if (iv) {
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}
static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;
    /* Encrypt/decrypt must be performed in place */
    if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
        return -1;
    /* If encrypting set explicit IV from sequence number (start of AAD) */
    if (EVP_CIPHER_CTX_encrypting(ctx))
        memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
               EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Get rest of IV from explicit IV */
    memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in,
           EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Correct length value */
    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L,
                            len))
        return -1;
    /* Use saved AAD */
    CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
    /* Fix buffer to point to payload */
    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
                                                    cctx->str) :
            CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
            return -1;
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    } else {
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, in + len, cctx->M))
                    return len;
            }
        }
        OPENSSL_cleanse(out, len);
        return -1;
    }
}
static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;
    /* If not set up, return error */
    if (!cctx->key_set)
        return -1;

    if (cctx->tls_aad_len >= 0)
        return aes_ccm_tls_cipher(ctx, out, in, len);

    /* EVP_*Final() doesn't return any data */
    if (in == NULL && out != NULL)
        return 0;

    if (!cctx->iv_set)
        return -1;

    if (!out) {
        if (!in) {
            if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
                                    15 - cctx->L, len))
                return -1;
            cctx->len_set = 1;
            return len;
        }
        /* If have AAD need message length */
        if (!cctx->len_set && len)
            return -1;
        CRYPTO_ccm128_aad(ccm, in, len);
        return len;
    }

    /* The tag must be set before actually decrypting data */
    if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
        return -1;

    /* If not set length yet do it */
    if (!cctx->len_set) {
        if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
                                15 - cctx->L, len))
            return -1;
        cctx->len_set = 1;
    }
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
                                                    cctx->str) :
            CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        cctx->tag_set = 1;
        return len;
    } else {
        int rv = -1;
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
                                   cctx->M))
                    rv = len;
            }
        }
        if (rv == -1)
            OPENSSL_cleanse(out, len);
        cctx->iv_set = 0;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        return rv;
    }
}

#define aes_ccm_cleanup NULL

BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
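/*
 * Illustrative sketch (not part of this file): CCM needs the total plaintext
 * length before any AAD, which is why aes_ccm_cipher() accepts a length-only
 * update (in and out both NULL). Encrypt-side calling sequence, assuming
 * hypothetical key (16 bytes), nonce (7 bytes), aad, pt and ct buffers, error
 * checks omitted; an IV length of 7 implies L = 8:
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     unsigned char tag[12];
 *     int outl;
 *
 *     EVP_EncryptInit_ex(c, EVP_aes_128_ccm(), NULL, NULL, NULL);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 7, NULL);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_TAG, sizeof(tag), NULL);
 *     EVP_EncryptInit_ex(c, NULL, NULL, key, nonce);
 *     EVP_EncryptUpdate(c, NULL, &outl, NULL, pt_len);
 *     EVP_EncryptUpdate(c, NULL, &outl, aad, aad_len);
 *     EVP_EncryptUpdate(c, ct, &outl, pt, pt_len);
 *     EVP_EncryptFinal_ex(c, ct + outl, &outl);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, sizeof(tag), tag);
 *     EVP_CIPHER_CTX_free(c);
 */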
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        AES_KEY ks;
    } ks;
    /* Indicates if IV has been set */
    unsigned char *iv;
} EVP_AES_WRAP_CTX;

static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                             const unsigned char *iv, int enc)
{
    EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);

    if (!iv && !key)
        return 1;
    if (key) {
        if (EVP_CIPHER_CTX_encrypting(ctx))
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &wctx->ks.ks);
        else
            AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &wctx->ks.ks);
        if (!iv)
            wctx->iv = NULL;
    }
    if (iv) {
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, EVP_CIPHER_CTX_iv_length(ctx));
        wctx->iv = EVP_CIPHER_CTX_iv_noconst(ctx);
    }
    return 1;
}
static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t inlen)
{
    EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
    size_t rv;
    /* AES wrap with padding has IV length of 4, without padding 8 */
    int pad = EVP_CIPHER_CTX_iv_length(ctx) == 4;
    /* No final operation so always return zero length */
    if (!in)
        return 0;
    /* Input length must always be non-zero */
    if (!inlen)
        return -1;
    /* If decrypting need at least 16 bytes and multiple of 8 */
    if (!EVP_CIPHER_CTX_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
        return -1;
    /* If not padding input must be multiple of 8 */
    if (!pad && inlen & 0x7)
        return -1;
    if (is_partially_overlapping(out, in, inlen)) {
        EVPerr(EVP_F_AES_WRAP_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
        return 0;
    }
    if (!out) {
        if (EVP_CIPHER_CTX_encrypting(ctx)) {
            /* If padding round up to multiple of 8 */
            if (pad)
                inlen = (inlen + 7) / 8 * 8;
            /* 8 byte prefix */
            return inlen + 8;
        } else {
            /*
             * If not padding output will be exactly 8 bytes smaller than
             * input. If padding it will be at least 8 bytes smaller but we
             * don't know how much.
             */
            return inlen - 8;
        }
    }
    if (pad) {
        if (EVP_CIPHER_CTX_encrypting(ctx))
            rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
                                     out, in, inlen,
                                     (block128_f) AES_encrypt);
        else
            rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
                                       out, in, inlen,
                                       (block128_f) AES_decrypt);
    } else {
        if (EVP_CIPHER_CTX_encrypting(ctx))
            rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
                                 out, in, inlen, (block128_f) AES_encrypt);
        else
            rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
                                   out, in, inlen, (block128_f) AES_decrypt);
    }
    return rv ? (int)rv : -1;
}
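/*
 * Illustrative sketch (not part of this file): RFC 3394 wrapping adds an
 * 8-byte integrity check prefix, so the wrapped output above is inlen + 8
 * bytes, and wrap-mode ciphers require the caller to opt in explicitly.
 * Example with a hypothetical 32-byte KEK and a 16-byte key to wrap, error
 * checks omitted (outl ends up as 24):
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     unsigned char wrapped[16 + 8];
 *     int outl;
 *
 *     EVP_CIPHER_CTX_set_flags(c, EVP_CIPHER_CTX_FLAG_WRAP_ALLOW);
 *     EVP_EncryptInit_ex(c, EVP_aes_256_wrap(), NULL, kek, NULL);
 *     EVP_EncryptUpdate(c, wrapped, &outl, cek, 16);
 *     EVP_CIPHER_CTX_free(c);
 */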
#define WRAP_FLAGS      (EVP_CIPH_WRAP_MODE \
                | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)

static const EVP_CIPHER aes_128_wrap = {
    NID_id_aes128_wrap,
    8, 16, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_128_wrap(void)
{
    return &aes_128_wrap;
}

static const EVP_CIPHER aes_192_wrap = {
    NID_id_aes192_wrap,
    8, 24, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_192_wrap(void)
{
    return &aes_192_wrap;
}

static const EVP_CIPHER aes_256_wrap = {
    NID_id_aes256_wrap,
    8, 32, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_256_wrap(void)
{
    return &aes_256_wrap;
}

static const EVP_CIPHER aes_128_wrap_pad = {
    NID_id_aes128_wrap_pad,
    8, 16, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
{
    return &aes_128_wrap_pad;
}

static const EVP_CIPHER aes_192_wrap_pad = {
    NID_id_aes192_wrap_pad,
    8, 24, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
{
    return &aes_192_wrap_pad;
}

static const EVP_CIPHER aes_256_wrap_pad = {
    NID_id_aes256_wrap_pad,
    8, 32, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
{
    return &aes_256_wrap_pad;
}
4054 static int aes_ocb_ctrl(EVP_CIPHER_CTX
*c
, int type
, int arg
, void *ptr
)
4056 EVP_AES_OCB_CTX
*octx
= EVP_C_DATA(EVP_AES_OCB_CTX
,c
);
4057 EVP_CIPHER_CTX
*newc
;
4058 EVP_AES_OCB_CTX
*new_octx
;
4064 octx
->ivlen
= EVP_CIPHER_CTX_iv_length(c
);
4065 octx
->iv
= EVP_CIPHER_CTX_iv_noconst(c
);
4067 octx
->data_buf_len
= 0;
4068 octx
->aad_buf_len
= 0;
4071 case EVP_CTRL_AEAD_SET_IVLEN
:
4072 /* IV len must be 1 to 15 */
4073 if (arg
<= 0 || arg
> 15)
4079 case EVP_CTRL_AEAD_SET_TAG
:
4081 /* Tag len must be 0 to 16 */
4082 if (arg
< 0 || arg
> 16)
4088 if (arg
!= octx
->taglen
|| EVP_CIPHER_CTX_encrypting(c
))
4090 memcpy(octx
->tag
, ptr
, arg
);
4093 case EVP_CTRL_AEAD_GET_TAG
:
4094 if (arg
!= octx
->taglen
|| !EVP_CIPHER_CTX_encrypting(c
))
4097 memcpy(ptr
, octx
->tag
, arg
);
4101 newc
= (EVP_CIPHER_CTX
*)ptr
;
4102 new_octx
= EVP_C_DATA(EVP_AES_OCB_CTX
,newc
);
4103 return CRYPTO_ocb128_copy_ctx(&new_octx
->ocb
, &octx
->ocb
,
4104 &new_octx
->ksenc
.ks
,
4105 &new_octx
->ksdec
.ks
);
# ifdef HWAES_CAPABLE
#  ifdef HWAES_ocb_encrypt
void HWAES_ocb_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
#  else
#    define HWAES_ocb_encrypt ((ocb128_f)NULL)
#  endif
#  ifdef HWAES_ocb_decrypt
void HWAES_ocb_decrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
#  else
#    define HWAES_ocb_decrypt ((ocb128_f)NULL)
#  endif
# endif
static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
# ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksenc.ks);
                HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) HWAES_encrypt,
                                        (block128_f) HWAES_decrypt,
                                        enc ? HWAES_ocb_encrypt
                                            : HWAES_ocb_decrypt))
                    return 0;
                break;
            }
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksenc.ks);
                vpaes_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) vpaes_encrypt,
                                        (block128_f) vpaes_decrypt,
                                        NULL))
                    return 0;
                break;
            }
# endif
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &octx->ksenc.ks);
            AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) AES_encrypt,
                                    (block128_f) AES_decrypt,
                                    NULL))
                return 0;
        } while (0);

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}
static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    unsigned char *buf;
    int *buf_len;
    int written_len = 0;
    size_t trailing_len;
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

    /* If IV or Key not set then return error */
    if (!octx->iv_set)
        return -1;

    if (!octx->key_set)
        return -1;

    if (in != NULL) {
        /*
         * Need to ensure we are only passing full blocks to low level OCB
         * routines. We do it here rather than in EVP_EncryptUpdate/
         * EVP_DecryptUpdate because we need to pass full blocks of AAD too
         * and those routines don't support that
         */

        /* Are we dealing with AAD or normal data here? */
        if (out == NULL) {
            buf = octx->aad_buf;
            buf_len = &(octx->aad_buf_len);
        } else {
            buf = octx->data_buf;
            buf_len = &(octx->data_buf_len);

            if (is_partially_overlapping(out + *buf_len, in, len)) {
                EVPerr(EVP_F_AES_OCB_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
                return 0;
            }
        }

        /*
         * If we've got a partially filled buffer from a previous call then
         * use that data first
         */
        if (*buf_len > 0) {
            unsigned int remaining;

            remaining = AES_BLOCK_SIZE - (*buf_len);
            if (remaining > len) {
                memcpy(buf + (*buf_len), in, len);
                *(buf_len) += len;
                return 0;
            }
            memcpy(buf + (*buf_len), in, remaining);

            /*
             * If we get here we've filled the buffer, so process it
             */
            len -= remaining;
            in += remaining;
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
                    return -1;
            } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            }
            written_len = AES_BLOCK_SIZE;
            *buf_len = 0;
            if (out != NULL)
                out += AES_BLOCK_SIZE;
        }

        /* Do we have a partial block to handle at the end? */
        trailing_len = len % AES_BLOCK_SIZE;

        /*
         * If we've got some full blocks to handle, then process these first
         */
        if (len != trailing_len) {
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
                    return -1;
            } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            }
            written_len += len - trailing_len;
            in += len - trailing_len;
        }

        /* Handle any trailing partial block */
        if (trailing_len > 0) {
            memcpy(buf, in, trailing_len);
            *buf_len = trailing_len;
        }

        return written_len;
    } else {
        /*
         * First of all empty the buffer of any partial block that we might
         * have been provided - both for data and AAD
         */
        if (octx->data_buf_len > 0) {
            if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            }
            written_len = octx->data_buf_len;
            octx->data_buf_len = 0;
        }
        if (octx->aad_buf_len > 0) {
            if (!CRYPTO_ocb128_aad
                (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
                return -1;
            octx->aad_buf_len = 0;
        }
        /* If decrypting then verify */
        if (!EVP_CIPHER_CTX_encrypting(ctx)) {
            if (octx->taglen < 0)
                return -1;
            if (CRYPTO_ocb128_finish(&octx->ocb,
                                     octx->tag, octx->taglen) != 0)
                return -1;
            octx->iv_set = 0;
            return written_len;
        }
        /* If encrypting then just get the tag */
        if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
            return -1;
        /* Don't reuse the IV */
        octx->iv_set = 0;
        return written_len;
    }
}
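/*
 * Illustrative sketch (not part of this file): decrypt-side OCB usage, where
 * the tag must be supplied before the final call that performs the
 * CRYPTO_ocb128_finish() verification above. Hypothetical key (16 bytes),
 * nonce (12 bytes), aad, ct, pt and tag buffers, error checks omitted; the
 * final call fails (returns <= 0) if the tag does not verify:
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     int outl;
 *
 *     EVP_DecryptInit_ex(c, EVP_aes_128_ocb(), NULL, NULL, NULL);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL);
 *     EVP_DecryptInit_ex(c, NULL, NULL, key, nonce);
 *     EVP_DecryptUpdate(c, NULL, &outl, aad, aad_len);
 *     EVP_DecryptUpdate(c, pt, &outl, ct, ct_len);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_TAG, 16, tag);
 *     EVP_DecryptFinal_ex(c, pt + outl, &outl);
 *     EVP_CIPHER_CTX_free(c);
 */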
static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
    CRYPTO_ocb128_cleanup(&octx->ocb);
    return 1;
}

BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
#endif                          /* OPENSSL_NO_OCB */
#ifndef OPENSSL_NO_SIV

typedef SIV128_CONTEXT EVP_AES_SIV_CTX;

#define aesni_siv_init_key aes_siv_init_key
static int aes_siv_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    const EVP_CIPHER *ctr;
    const EVP_CIPHER *cbc;
    SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, ctx);
    int klen = EVP_CIPHER_CTX_key_length(ctx) / 2;

    if (key == NULL)
        return 1;

    switch (klen) {
    case 16:
        cbc = EVP_aes_128_cbc();
        ctr = EVP_aes_128_ctr();
        break;
    case 24:
        cbc = EVP_aes_192_cbc();
        ctr = EVP_aes_192_ctr();
        break;
    case 32:
        cbc = EVP_aes_256_cbc();
        ctr = EVP_aes_256_ctr();
        break;
    default:
        return 0;
    }

    /* klen is the length of the underlying cipher, not the input key,
       which should be twice as long */
    return CRYPTO_siv128_init(sctx, key, klen, cbc, ctr);
}
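/*
 * Illustrative note (not part of the implementation): because klen above is
 * half of the EVP key length, the SIV ciphers take double-length keys:
 *
 *     EVP_aes_128_siv()  ->  32-byte key  ->  klen 16, EVP_aes_128_cbc/ctr
 *     EVP_aes_192_siv()  ->  48-byte key  ->  klen 24, EVP_aes_192_cbc/ctr
 *     EVP_aes_256_siv()  ->  64-byte key  ->  klen 32, EVP_aes_256_cbc/ctr
 *
 * One half keys the S2V (CMAC) step and the other the CTR step, per RFC 5297.
 */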
#define aesni_siv_cipher aes_siv_cipher
static int aes_siv_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, ctx);

    /* EncryptFinal or DecryptFinal */
    if (in == NULL)
        return CRYPTO_siv128_finish(sctx);

    /* Deal with associated data */
    if (out == NULL)
        return CRYPTO_siv128_aad(sctx, in, len);

    if (EVP_CIPHER_CTX_encrypting(ctx))
        return CRYPTO_siv128_encrypt(sctx, in, out, len);

    return CRYPTO_siv128_decrypt(sctx, in, out, len);
}

#define aesni_siv_cleanup aes_siv_cleanup
static int aes_siv_cleanup(EVP_CIPHER_CTX *c)
{
    SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, c);

    return CRYPTO_siv128_cleanup(sctx);
}

#define aesni_siv_ctrl aes_siv_ctrl
static int aes_siv_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, c);
    SIV128_CONTEXT *sctx_out;

    switch (type) {
    case EVP_CTRL_INIT:
        return CRYPTO_siv128_cleanup(sctx);

    case EVP_CTRL_SET_SPEED:
        return CRYPTO_siv128_speed(sctx, arg);

    case EVP_CTRL_AEAD_SET_TAG:
        if (!EVP_CIPHER_CTX_encrypting(c))
            return CRYPTO_siv128_set_tag(sctx, ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (!EVP_CIPHER_CTX_encrypting(c))
            return 0;
        return CRYPTO_siv128_get_tag(sctx, ptr, arg);

    case EVP_CTRL_COPY:
        sctx_out = EVP_C_DATA(SIV128_CONTEXT, (EVP_CIPHER_CTX *)ptr);
        return CRYPTO_siv128_copy_ctx(sctx_out, sctx);

    default:
        return -1;

    }
}

#define SIV_FLAGS    (EVP_CIPH_FLAG_AEAD_CIPHER | EVP_CIPH_FLAG_DEFAULT_ASN1 \
                      | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                      | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CUSTOM_COPY \
                      | EVP_CIPH_CTRL_INIT)

BLOCK_CIPHER_custom(NID_aes, 128, 1, 0, siv, SIV, SIV_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 1, 0, siv, SIV, SIV_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 0, siv, SIV, SIV_FLAGS)