]> git.ipfire.org Git - thirdparty/openssl.git/blame - crypto/evp/e_aes.c
Add Common shared code needed to move aes ciphers to providers
[thirdparty/openssl.git] / crypto / evp / e_aes.c
CommitLineData
aa6bb135 1/*
5516c19b 2 * Copyright 2001-2019 The OpenSSL Project Authors. All Rights Reserved.
deb2c1a1 3 *
4a8b0c55 4 * Licensed under the Apache License 2.0 (the "License"). You may not use
aa6bb135
RS
5 * this file except in compliance with the License. You can obtain a copy
6 * in the file LICENSE in the source distribution or at
7 * https://www.openssl.org/source/license.html
deb2c1a1
DSH
8 */
9
743694a6
MC
10#include <string.h>
11#include <assert.h>
8c84b677 12#include <openssl/opensslconf.h>
5158c763
MC
13#include <openssl/crypto.h>
14#include <openssl/evp.h>
15#include <openssl/err.h>
5158c763 16#include <openssl/aes.h>
743694a6
MC
17#include <openssl/rand.h>
18#include <openssl/cmac.h>
5158c763 19#include "internal/evp_int.h"
39147079 20#include "internal/cryptlib.h"
743694a6 21#include "internal/modes_int.h"
459b15d4
SL
22#include "internal/siv_int.h"
23#include "internal/aes_platform.h"
7141ba31 24#include "evp_locl.h"
0f113f3e
MC
25
26typedef struct {
27 union {
39147079 28 OSSL_UNION_ALIGN;
0f113f3e
MC
29 AES_KEY ks;
30 } ks;
31 block128_f block;
32 union {
33 cbc128_f cbc;
34 ctr128_f ctr;
35 } stream;
36} EVP_AES_KEY;
37
38typedef struct {
39 union {
39147079 40 OSSL_UNION_ALIGN;
0f113f3e
MC
41 AES_KEY ks;
42 } ks; /* AES key schedule to use */
43 int key_set; /* Set if key initialised */
44 int iv_set; /* Set if an iv is set */
45 GCM128_CONTEXT gcm;
46 unsigned char *iv; /* Temporary IV store */
47 int ivlen; /* IV length */
48 int taglen;
49 int iv_gen; /* It is OK to generate IVs */
bcf082d1 50 int iv_gen_rand; /* No IV was specified, so generate a rand IV */
0f113f3e 51 int tls_aad_len; /* TLS AAD length */
d6b34570 52 uint64_t tls_enc_records; /* Number of TLS records encrypted */
0f113f3e
MC
53 ctr128_f ctr;
54} EVP_AES_GCM_CTX;
55
56typedef struct {
57 union {
39147079 58 OSSL_UNION_ALIGN;
0f113f3e
MC
59 AES_KEY ks;
60 } ks1, ks2; /* AES key schedules to use */
61 XTS128_CONTEXT xts;
62 void (*stream) (const unsigned char *in,
63 unsigned char *out, size_t length,
64 const AES_KEY *key1, const AES_KEY *key2,
65 const unsigned char iv[16]);
66} EVP_AES_XTS_CTX;
67
2c840201
P
/*
 * XTS with two identical half-keys is insecure (Rogaway). In FIPS mode it
 * is rejected for both directions; otherwise decryption of legacy data with
 * duplicated keys is still permitted.
 */
#ifdef FIPS_MODE
static const int allow_insecure_decrypt = 0;
#else
static const int allow_insecure_decrypt = 1;
#endif
73
0f113f3e
MC
74typedef struct {
75 union {
39147079 76 OSSL_UNION_ALIGN;
0f113f3e
MC
77 AES_KEY ks;
78 } ks; /* AES key schedule to use */
79 int key_set; /* Set if key initialised */
80 int iv_set; /* Set if an iv is set */
81 int tag_set; /* Set if tag is valid */
82 int len_set; /* Set if message length set */
83 int L, M; /* L and M parameters from RFC3610 */
e75c5a79 84 int tls_aad_len; /* TLS AAD length */
0f113f3e
MC
85 CCM128_CONTEXT ccm;
86 ccm128_f str;
87} EVP_AES_CCM_CTX;
88
5158c763 89#ifndef OPENSSL_NO_OCB
0f113f3e 90typedef struct {
bdc985b1 91 union {
39147079 92 OSSL_UNION_ALIGN;
bdc985b1
AP
93 AES_KEY ks;
94 } ksenc; /* AES key schedule to use for encryption */
95 union {
39147079 96 OSSL_UNION_ALIGN;
bdc985b1
AP
97 AES_KEY ks;
98 } ksdec; /* AES key schedule to use for decryption */
0f113f3e
MC
99 int key_set; /* Set if key initialised */
100 int iv_set; /* Set if an iv is set */
101 OCB128_CONTEXT ocb;
102 unsigned char *iv; /* Temporary IV store */
103 unsigned char tag[16];
104 unsigned char data_buf[16]; /* Store partial data blocks */
105 unsigned char aad_buf[16]; /* Store partial AAD blocks */
106 int data_buf_len;
107 int aad_buf_len;
108 int ivlen; /* IV length */
109 int taglen;
110} EVP_AES_OCB_CTX;
5158c763 111#endif
e6b336ef 112
5158c763 113#define MAXBITCHUNK ((size_t)1<<(sizeof(size_t)*8-4))
17f121de 114
03a5e5ae
PS
/*
 * Increment the trailing 64-bit big-endian counter in a 16-byte block
 * (bytes 0..7 of the supplied pointer, most-significant byte first),
 * wrapping to zero on overflow.
 */
static void ctr64_inc(unsigned char *counter)
{
    int i;

    for (i = 7; i >= 0; i--) {
        if (++counter[i] != 0)
            return;             /* no carry out of this byte: done */
    }
}
130
459b15d4 131#if defined(AESNI_CAPABLE)
5158c763 132# if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
5158c763 133# define AES_gcm_encrypt aesni_gcm_encrypt
5158c763 134# define AES_gcm_decrypt aesni_gcm_decrypt
5158c763 135# define AES_GCM_ASM2(gctx) (gctx->gcm.block==(block128_f)aesni_encrypt && \
0f113f3e 136 gctx->gcm.ghash==gcm_ghash_avx)
5158c763
MC
137# undef AES_GCM_ASM2 /* minor size optimization */
138# endif
4e049c52 139
17f121de 140static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
141 const unsigned char *iv, int enc)
142{
143 int ret, mode;
6435f0f6 144 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
0f113f3e 145
6435f0f6 146 mode = EVP_CIPHER_CTX_mode(ctx);
0f113f3e
MC
147 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
148 && !enc) {
6435f0f6
RL
149 ret = aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
150 &dat->ks.ks);
0f113f3e
MC
151 dat->block = (block128_f) aesni_decrypt;
152 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
153 (cbc128_f) aesni_cbc_encrypt : NULL;
154 } else {
6435f0f6
RL
155 ret = aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
156 &dat->ks.ks);
0f113f3e
MC
157 dat->block = (block128_f) aesni_encrypt;
158 if (mode == EVP_CIPH_CBC_MODE)
159 dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
160 else if (mode == EVP_CIPH_CTR_MODE)
161 dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
162 else
163 dat->stream.cbc = NULL;
164 }
165
166 if (ret < 0) {
167 EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
168 return 0;
169 }
170
171 return 1;
172}
173
174static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
175 const unsigned char *in, size_t len)
d1fff483 176{
6435f0f6
RL
177 aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
178 EVP_CIPHER_CTX_iv_noconst(ctx),
179 EVP_CIPHER_CTX_encrypting(ctx));
d1fff483 180
0f113f3e 181 return 1;
d1fff483
AP
182}
183
0f113f3e
MC
184static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
185 const unsigned char *in, size_t len)
d1fff483 186{
6435f0f6 187 size_t bl = EVP_CIPHER_CTX_block_size(ctx);
d1fff483 188
0f113f3e
MC
189 if (len < bl)
190 return 1;
d1fff483 191
6435f0f6
RL
192 aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
193 EVP_CIPHER_CTX_encrypting(ctx));
d1fff483 194
0f113f3e 195 return 1;
d1fff483
AP
196}
197
5158c763 198# define aesni_ofb_cipher aes_ofb_cipher
0f113f3e
MC
199static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
200 const unsigned char *in, size_t len);
d1fff483 201
5158c763 202# define aesni_cfb_cipher aes_cfb_cipher
0f113f3e
MC
203static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
204 const unsigned char *in, size_t len);
d1fff483 205
5158c763 206# define aesni_cfb8_cipher aes_cfb8_cipher
0f113f3e
MC
207static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
208 const unsigned char *in, size_t len);
d1fff483 209
5158c763 210# define aesni_cfb1_cipher aes_cfb1_cipher
0f113f3e
MC
211static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
212 const unsigned char *in, size_t len);
d1fff483 213
5158c763 214# define aesni_ctr_cipher aes_ctr_cipher
17f121de 215static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 216 const unsigned char *in, size_t len);
d1fff483 217
17f121de 218static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
219 const unsigned char *iv, int enc)
220{
6435f0f6 221 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
0f113f3e
MC
222 if (!iv && !key)
223 return 1;
224 if (key) {
6435f0f6
RL
225 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
226 &gctx->ks.ks);
0f113f3e
MC
227 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
228 gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
229 /*
230 * If we have an iv can set it directly, otherwise use saved IV.
231 */
232 if (iv == NULL && gctx->iv_set)
233 iv = gctx->iv;
234 if (iv) {
235 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
236 gctx->iv_set = 1;
237 }
238 gctx->key_set = 1;
239 } else {
240 /* If key set use IV, otherwise copy */
241 if (gctx->key_set)
242 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
243 else
244 memcpy(gctx->iv, iv, gctx->ivlen);
245 gctx->iv_set = 1;
246 gctx->iv_gen = 0;
247 }
248 return 1;
249}
250
5158c763 251# define aesni_gcm_cipher aes_gcm_cipher
17f121de 252static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 253 const unsigned char *in, size_t len);
17f121de
AP
254
255static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
256 const unsigned char *iv, int enc)
257{
6435f0f6 258 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
2c840201 259
0f113f3e
MC
260 if (!iv && !key)
261 return 1;
262
263 if (key) {
3538b0f7
P
264 /* The key is two half length keys in reality */
265 const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
266 const int bits = bytes * 8;
267
268 /*
269 * Verify that the two keys are different.
270 *
271 * This addresses Rogaway's vulnerability.
272 * See comment in aes_xts_init_key() below.
273 */
2c840201
P
274 if ((!allow_insecure_decrypt || enc)
275 && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
3538b0f7
P
276 EVPerr(EVP_F_AESNI_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
277 return 0;
278 }
279
0f113f3e
MC
280 /* key_len is two AES keys */
281 if (enc) {
3538b0f7 282 aesni_set_encrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e
MC
283 xctx->xts.block1 = (block128_f) aesni_encrypt;
284 xctx->stream = aesni_xts_encrypt;
285 } else {
3538b0f7 286 aesni_set_decrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e
MC
287 xctx->xts.block1 = (block128_f) aesni_decrypt;
288 xctx->stream = aesni_xts_decrypt;
289 }
290
3538b0f7 291 aesni_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
0f113f3e
MC
292 xctx->xts.block2 = (block128_f) aesni_encrypt;
293
294 xctx->xts.key1 = &xctx->ks1;
295 }
296
297 if (iv) {
298 xctx->xts.key2 = &xctx->ks2;
6435f0f6 299 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
0f113f3e
MC
300 }
301
302 return 1;
303}
304
5158c763 305# define aesni_xts_cipher aes_xts_cipher
17f121de 306static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 307 const unsigned char *in, size_t len);
17f121de
AP
308
309static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
310 const unsigned char *iv, int enc)
311{
6435f0f6 312 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
0f113f3e
MC
313 if (!iv && !key)
314 return 1;
315 if (key) {
6435f0f6
RL
316 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
317 &cctx->ks.ks);
0f113f3e
MC
318 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
319 &cctx->ks, (block128_f) aesni_encrypt);
320 cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
321 (ccm128_f) aesni_ccm64_decrypt_blocks;
322 cctx->key_set = 1;
323 }
324 if (iv) {
6435f0f6 325 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
0f113f3e
MC
326 cctx->iv_set = 1;
327 }
328 return 1;
329}
330
5158c763 331# define aesni_ccm_cipher aes_ccm_cipher
17f121de 332static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 333 const unsigned char *in, size_t len);
17f121de 334
5158c763 335# ifndef OPENSSL_NO_OCB
e6b336ef 336static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
337 const unsigned char *iv, int enc)
338{
6435f0f6 339 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
0f113f3e
MC
340 if (!iv && !key)
341 return 1;
342 if (key) {
343 do {
344 /*
345 * We set both the encrypt and decrypt key here because decrypt
346 * needs both. We could possibly optimise to remove setting the
347 * decrypt for an encryption operation.
348 */
6435f0f6
RL
349 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
350 &octx->ksenc.ks);
351 aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
352 &octx->ksdec.ks);
bdc985b1
AP
353 if (!CRYPTO_ocb128_init(&octx->ocb,
354 &octx->ksenc.ks, &octx->ksdec.ks,
0f113f3e 355 (block128_f) aesni_encrypt,
bd30091c
AP
356 (block128_f) aesni_decrypt,
357 enc ? aesni_ocb_encrypt
358 : aesni_ocb_decrypt))
0f113f3e
MC
359 return 0;
360 }
361 while (0);
362
363 /*
364 * If we have an iv we can set it directly, otherwise use saved IV.
365 */
366 if (iv == NULL && octx->iv_set)
367 iv = octx->iv;
368 if (iv) {
369 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
370 != 1)
371 return 0;
372 octx->iv_set = 1;
373 }
374 octx->key_set = 1;
375 } else {
376 /* If key set use IV, otherwise copy */
377 if (octx->key_set)
378 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
379 else
380 memcpy(octx->iv, iv, octx->ivlen);
381 octx->iv_set = 1;
382 }
383 return 1;
384}
385
5158c763 386# define aesni_ocb_cipher aes_ocb_cipher
e6b336ef 387static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 388 const unsigned char *in, size_t len);
5158c763 389# endif /* OPENSSL_NO_OCB */
e6b336ef 390
5158c763 391# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
17f121de 392static const EVP_CIPHER aesni_##keylen##_##mode = { \
0f113f3e
MC
393 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
394 flags|EVP_CIPH_##MODE##_MODE, \
395 aesni_init_key, \
396 aesni_##mode##_cipher, \
397 NULL, \
398 sizeof(EVP_AES_KEY), \
399 NULL,NULL,NULL,NULL }; \
17f121de 400static const EVP_CIPHER aes_##keylen##_##mode = { \
0f113f3e
MC
401 nid##_##keylen##_##nmode,blocksize, \
402 keylen/8,ivlen, \
403 flags|EVP_CIPH_##MODE##_MODE, \
404 aes_init_key, \
405 aes_##mode##_cipher, \
406 NULL, \
407 sizeof(EVP_AES_KEY), \
408 NULL,NULL,NULL,NULL }; \
17f121de 409const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
8ca28da0 410{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
17f121de 411
5158c763 412# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
17f121de 413static const EVP_CIPHER aesni_##keylen##_##mode = { \
0f113f3e 414 nid##_##keylen##_##mode,blocksize, \
b1ceb439
TS
415 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
416 ivlen, \
0f113f3e
MC
417 flags|EVP_CIPH_##MODE##_MODE, \
418 aesni_##mode##_init_key, \
419 aesni_##mode##_cipher, \
420 aes_##mode##_cleanup, \
421 sizeof(EVP_AES_##MODE##_CTX), \
422 NULL,NULL,aes_##mode##_ctrl,NULL }; \
17f121de 423static const EVP_CIPHER aes_##keylen##_##mode = { \
0f113f3e 424 nid##_##keylen##_##mode,blocksize, \
b1ceb439
TS
425 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
426 ivlen, \
0f113f3e
MC
427 flags|EVP_CIPH_##MODE##_MODE, \
428 aes_##mode##_init_key, \
429 aes_##mode##_cipher, \
430 aes_##mode##_cleanup, \
431 sizeof(EVP_AES_##MODE##_CTX), \
432 NULL,NULL,aes_##mode##_ctrl,NULL }; \
17f121de 433const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
8ca28da0 434{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
d1fff483 435
459b15d4 436#elif defined(SPARC_AES_CAPABLE)
c5f6da54
AP
437
438static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
439 const unsigned char *iv, int enc)
440{
441 int ret, mode, bits;
6435f0f6 442 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
0f113f3e 443
6435f0f6
RL
444 mode = EVP_CIPHER_CTX_mode(ctx);
445 bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
0f113f3e
MC
446 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
447 && !enc) {
448 ret = 0;
6435f0f6 449 aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
0f113f3e
MC
450 dat->block = (block128_f) aes_t4_decrypt;
451 switch (bits) {
452 case 128:
453 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
454 (cbc128_f) aes128_t4_cbc_decrypt : NULL;
455 break;
456 case 192:
457 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
458 (cbc128_f) aes192_t4_cbc_decrypt : NULL;
459 break;
460 case 256:
461 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
462 (cbc128_f) aes256_t4_cbc_decrypt : NULL;
463 break;
464 default:
465 ret = -1;
466 }
467 } else {
468 ret = 0;
6435f0f6 469 aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
0f113f3e
MC
470 dat->block = (block128_f) aes_t4_encrypt;
471 switch (bits) {
472 case 128:
473 if (mode == EVP_CIPH_CBC_MODE)
474 dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
475 else if (mode == EVP_CIPH_CTR_MODE)
476 dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
477 else
478 dat->stream.cbc = NULL;
479 break;
480 case 192:
481 if (mode == EVP_CIPH_CBC_MODE)
482 dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
483 else if (mode == EVP_CIPH_CTR_MODE)
484 dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
485 else
486 dat->stream.cbc = NULL;
487 break;
488 case 256:
489 if (mode == EVP_CIPH_CBC_MODE)
490 dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
491 else if (mode == EVP_CIPH_CTR_MODE)
492 dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
493 else
494 dat->stream.cbc = NULL;
495 break;
496 default:
497 ret = -1;
498 }
499 }
500
501 if (ret < 0) {
502 EVPerr(EVP_F_AES_T4_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
503 return 0;
504 }
505
506 return 1;
507}
508
5158c763 509# define aes_t4_cbc_cipher aes_cbc_cipher
0f113f3e
MC
510static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
511 const unsigned char *in, size_t len);
512
5158c763 513# define aes_t4_ecb_cipher aes_ecb_cipher
0f113f3e
MC
514static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
515 const unsigned char *in, size_t len);
516
5158c763 517# define aes_t4_ofb_cipher aes_ofb_cipher
0f113f3e
MC
518static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
519 const unsigned char *in, size_t len);
520
5158c763 521# define aes_t4_cfb_cipher aes_cfb_cipher
0f113f3e
MC
522static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
523 const unsigned char *in, size_t len);
524
5158c763 525# define aes_t4_cfb8_cipher aes_cfb8_cipher
0f113f3e
MC
526static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
527 const unsigned char *in, size_t len);
528
5158c763 529# define aes_t4_cfb1_cipher aes_cfb1_cipher
0f113f3e
MC
530static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
531 const unsigned char *in, size_t len);
532
5158c763 533# define aes_t4_ctr_cipher aes_ctr_cipher
c5f6da54 534static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 535 const unsigned char *in, size_t len);
c5f6da54
AP
536
537static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
538 const unsigned char *iv, int enc)
539{
6435f0f6 540 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
0f113f3e
MC
541 if (!iv && !key)
542 return 1;
543 if (key) {
6435f0f6 544 int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
0f113f3e
MC
545 aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
546 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
547 (block128_f) aes_t4_encrypt);
548 switch (bits) {
549 case 128:
550 gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
551 break;
552 case 192:
553 gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
554 break;
555 case 256:
556 gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
557 break;
558 default:
559 return 0;
560 }
561 /*
562 * If we have an iv can set it directly, otherwise use saved IV.
563 */
564 if (iv == NULL && gctx->iv_set)
565 iv = gctx->iv;
566 if (iv) {
567 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
568 gctx->iv_set = 1;
569 }
570 gctx->key_set = 1;
571 } else {
572 /* If key set use IV, otherwise copy */
573 if (gctx->key_set)
574 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
575 else
576 memcpy(gctx->iv, iv, gctx->ivlen);
577 gctx->iv_set = 1;
578 gctx->iv_gen = 0;
579 }
580 return 1;
581}
582
5158c763 583# define aes_t4_gcm_cipher aes_gcm_cipher
c5f6da54 584static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 585 const unsigned char *in, size_t len);
c5f6da54
AP
586
587static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
588 const unsigned char *iv, int enc)
589{
6435f0f6 590 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
2c840201 591
0f113f3e
MC
592 if (!iv && !key)
593 return 1;
594
595 if (key) {
3538b0f7
P
596 /* The key is two half length keys in reality */
597 const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
598 const int bits = bytes * 8;
599
600 /*
601 * Verify that the two keys are different.
602 *
603 * This addresses Rogaway's vulnerability.
604 * See comment in aes_xts_init_key() below.
605 */
2c840201
P
606 if ((!allow_insecure_decrypt || enc)
607 && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
3538b0f7
P
608 EVPerr(EVP_F_AES_T4_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
609 return 0;
610 }
611
0f113f3e
MC
612 xctx->stream = NULL;
613 /* key_len is two AES keys */
614 if (enc) {
615 aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
616 xctx->xts.block1 = (block128_f) aes_t4_encrypt;
617 switch (bits) {
618 case 128:
619 xctx->stream = aes128_t4_xts_encrypt;
620 break;
0f113f3e
MC
621 case 256:
622 xctx->stream = aes256_t4_xts_encrypt;
623 break;
624 default:
625 return 0;
626 }
627 } else {
3538b0f7 628 aes_t4_set_decrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e
MC
629 xctx->xts.block1 = (block128_f) aes_t4_decrypt;
630 switch (bits) {
631 case 128:
632 xctx->stream = aes128_t4_xts_decrypt;
633 break;
0f113f3e
MC
634 case 256:
635 xctx->stream = aes256_t4_xts_decrypt;
636 break;
637 default:
638 return 0;
639 }
640 }
641
3538b0f7 642 aes_t4_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
0f113f3e
MC
643 xctx->xts.block2 = (block128_f) aes_t4_encrypt;
644
645 xctx->xts.key1 = &xctx->ks1;
646 }
647
648 if (iv) {
649 xctx->xts.key2 = &xctx->ks2;
6435f0f6 650 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
0f113f3e
MC
651 }
652
653 return 1;
654}
655
5158c763 656# define aes_t4_xts_cipher aes_xts_cipher
c5f6da54 657static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 658 const unsigned char *in, size_t len);
c5f6da54
AP
659
660static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
661 const unsigned char *iv, int enc)
662{
6435f0f6 663 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
0f113f3e
MC
664 if (!iv && !key)
665 return 1;
666 if (key) {
6435f0f6 667 int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
0f113f3e
MC
668 aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
669 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
670 &cctx->ks, (block128_f) aes_t4_encrypt);
bdc985b1 671 cctx->str = NULL;
0f113f3e
MC
672 cctx->key_set = 1;
673 }
674 if (iv) {
6435f0f6 675 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
0f113f3e
MC
676 cctx->iv_set = 1;
677 }
678 return 1;
679}
680
5158c763 681# define aes_t4_ccm_cipher aes_ccm_cipher
c5f6da54 682static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 683 const unsigned char *in, size_t len);
c5f6da54 684
5158c763 685# ifndef OPENSSL_NO_OCB
e6b336ef 686static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
687 const unsigned char *iv, int enc)
688{
6435f0f6 689 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
0f113f3e
MC
690 if (!iv && !key)
691 return 1;
692 if (key) {
693 do {
694 /*
695 * We set both the encrypt and decrypt key here because decrypt
696 * needs both. We could possibly optimise to remove setting the
697 * decrypt for an encryption operation.
698 */
6435f0f6
RL
699 aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
700 &octx->ksenc.ks);
701 aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
702 &octx->ksdec.ks);
bdc985b1
AP
703 if (!CRYPTO_ocb128_init(&octx->ocb,
704 &octx->ksenc.ks, &octx->ksdec.ks,
0f113f3e 705 (block128_f) aes_t4_encrypt,
02dc0b82
AP
706 (block128_f) aes_t4_decrypt,
707 NULL))
0f113f3e
MC
708 return 0;
709 }
710 while (0);
711
712 /*
713 * If we have an iv we can set it directly, otherwise use saved IV.
714 */
715 if (iv == NULL && octx->iv_set)
716 iv = octx->iv;
717 if (iv) {
718 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
719 != 1)
720 return 0;
721 octx->iv_set = 1;
722 }
723 octx->key_set = 1;
724 } else {
725 /* If key set use IV, otherwise copy */
726 if (octx->key_set)
727 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
728 else
729 memcpy(octx->iv, iv, octx->ivlen);
730 octx->iv_set = 1;
731 }
732 return 1;
733}
734
5158c763 735# define aes_t4_ocb_cipher aes_ocb_cipher
e6b336ef 736static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 737 const unsigned char *in, size_t len);
5158c763 738# endif /* OPENSSL_NO_OCB */
e6b336ef 739
87d06aed
MC
740# ifndef OPENSSL_NO_SIV
741# define aes_t4_siv_init_key aes_siv_init_key
742# define aes_t4_siv_cipher aes_siv_cipher
743# endif /* OPENSSL_NO_SIV */
744
5158c763 745# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
c5f6da54 746static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
0f113f3e
MC
747 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
748 flags|EVP_CIPH_##MODE##_MODE, \
749 aes_t4_init_key, \
750 aes_t4_##mode##_cipher, \
751 NULL, \
752 sizeof(EVP_AES_KEY), \
753 NULL,NULL,NULL,NULL }; \
c5f6da54 754static const EVP_CIPHER aes_##keylen##_##mode = { \
0f113f3e
MC
755 nid##_##keylen##_##nmode,blocksize, \
756 keylen/8,ivlen, \
757 flags|EVP_CIPH_##MODE##_MODE, \
758 aes_init_key, \
759 aes_##mode##_cipher, \
760 NULL, \
761 sizeof(EVP_AES_KEY), \
762 NULL,NULL,NULL,NULL }; \
c5f6da54
AP
763const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
764{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
765
5158c763 766# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
c5f6da54 767static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
0f113f3e 768 nid##_##keylen##_##mode,blocksize, \
b1ceb439
TS
769 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
770 ivlen, \
0f113f3e
MC
771 flags|EVP_CIPH_##MODE##_MODE, \
772 aes_t4_##mode##_init_key, \
773 aes_t4_##mode##_cipher, \
774 aes_##mode##_cleanup, \
775 sizeof(EVP_AES_##MODE##_CTX), \
776 NULL,NULL,aes_##mode##_ctrl,NULL }; \
c5f6da54 777static const EVP_CIPHER aes_##keylen##_##mode = { \
0f113f3e 778 nid##_##keylen##_##mode,blocksize, \
b1ceb439
TS
779 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
780 ivlen, \
0f113f3e
MC
781 flags|EVP_CIPH_##MODE##_MODE, \
782 aes_##mode##_init_key, \
783 aes_##mode##_cipher, \
784 aes_##mode##_cleanup, \
785 sizeof(EVP_AES_##MODE##_CTX), \
786 NULL,NULL,aes_##mode##_ctrl,NULL }; \
c5f6da54
AP
787const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
788{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
789
459b15d4
SL
790#elif defined(S390X_aes_128_CAPABLE)
791/* IBM S390X support */
55bd169f
PS
792typedef struct {
793 union {
39147079 794 OSSL_UNION_ALIGN;
55bd169f
PS
795 /*-
796 * KM-AES parameter block - begin
797 * (see z/Architecture Principles of Operation >= SA22-7832-06)
798 */
799 struct {
800 unsigned char k[32];
801 } param;
802 /* KM-AES parameter block - end */
803 } km;
804 unsigned int fc;
805} S390X_AES_ECB_CTX;
806
dacd2a87
PS
807typedef struct {
808 union {
39147079 809 OSSL_UNION_ALIGN;
dacd2a87
PS
810 /*-
811 * KMO-AES parameter block - begin
812 * (see z/Architecture Principles of Operation >= SA22-7832-08)
813 */
814 struct {
815 unsigned char cv[16];
816 unsigned char k[32];
817 } param;
818 /* KMO-AES parameter block - end */
819 } kmo;
820 unsigned int fc;
821
822 int res;
823} S390X_AES_OFB_CTX;
824
74d38a86
PS
825typedef struct {
826 union {
39147079 827 OSSL_UNION_ALIGN;
74d38a86
PS
828 /*-
829 * KMF-AES parameter block - begin
830 * (see z/Architecture Principles of Operation >= SA22-7832-08)
831 */
832 struct {
833 unsigned char cv[16];
834 unsigned char k[32];
835 } param;
836 /* KMF-AES parameter block - end */
837 } kmf;
838 unsigned int fc;
839
840 int res;
841} S390X_AES_CFB_CTX;
842
96530eea
PS
843typedef struct {
844 union {
39147079 845 OSSL_UNION_ALIGN;
96530eea 846 /*-
5d2a6f4b
PS
847 * KMA-GCM-AES parameter block - begin
848 * (see z/Architecture Principles of Operation >= SA22-7832-11)
96530eea
PS
849 */
850 struct {
851 unsigned char reserved[12];
852 union {
853 unsigned int w;
854 unsigned char b[4];
855 } cv;
856 union {
857 unsigned long long g[2];
858 unsigned char b[16];
859 } t;
860 unsigned char h[16];
861 unsigned long long taadl;
862 unsigned long long tpcl;
863 union {
864 unsigned long long g[2];
865 unsigned int w[4];
866 } j0;
867 unsigned char k[32];
868 } param;
5d2a6f4b 869 /* KMA-GCM-AES parameter block - end */
96530eea
PS
870 } kma;
871 unsigned int fc;
872 int key_set;
873
874 unsigned char *iv;
875 int ivlen;
876 int iv_set;
877 int iv_gen;
878
879 int taglen;
880
881 unsigned char ares[16];
882 unsigned char mres[16];
883 unsigned char kres[16];
884 int areslen;
885 int mreslen;
886 int kreslen;
887
888 int tls_aad_len;
d6b34570 889 uint64_t tls_enc_records; /* Number of TLS records encrypted */
96530eea
PS
890} S390X_AES_GCM_CTX;
891
39f5b069
PS
892typedef struct {
893 union {
39147079 894 OSSL_UNION_ALIGN;
39f5b069
PS
895 /*-
896 * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
897 * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
898 * rounds field is used to store the function code and that the key
899 * schedule is not stored (if aes hardware support is detected).
900 */
901 struct {
902 unsigned char pad[16];
903 AES_KEY k;
904 } key;
905
906 struct {
907 /*-
908 * KMAC-AES parameter block - begin
909 * (see z/Architecture Principles of Operation >= SA22-7832-08)
910 */
911 struct {
912 union {
913 unsigned long long g[2];
914 unsigned char b[16];
915 } icv;
916 unsigned char k[32];
917 } kmac_param;
918 /* KMAC-AES paramater block - end */
919
920 union {
921 unsigned long long g[2];
922 unsigned char b[16];
923 } nonce;
924 union {
925 unsigned long long g[2];
926 unsigned char b[16];
927 } buf;
928
929 unsigned long long blocks;
930 int l;
931 int m;
932 int tls_aad_len;
933 int iv_set;
934 int tag_set;
935 int len_set;
936 int key_set;
937
938 unsigned char pad[140];
939 unsigned int fc;
940 } ccm;
941 } aes;
942} S390X_AES_CCM_CTX;
943
96530eea
PS
944# define s390x_aes_init_key aes_init_key
945static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
946 const unsigned char *iv, int enc);
947
55bd169f
PS
948# define S390X_AES_CBC_CTX EVP_AES_KEY
949
950# define s390x_aes_cbc_init_key aes_init_key
96530eea
PS
951
952# define s390x_aes_cbc_cipher aes_cbc_cipher
953static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
954 const unsigned char *in, size_t len);
955
55bd169f
PS
956static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx,
957 const unsigned char *key,
958 const unsigned char *iv, int enc)
959{
960 S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
961 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
962
963 cctx->fc = S390X_AES_FC(keylen);
964 if (!enc)
965 cctx->fc |= S390X_DECRYPT;
966
967 memcpy(cctx->km.param.k, key, keylen);
968 return 1;
969}
96530eea 970
96530eea 971static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
55bd169f
PS
972 const unsigned char *in, size_t len)
973{
974 S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
975
976 s390x_km(in, len, out, cctx->fc, &cctx->km.param);
977 return 1;
978}
96530eea 979
dacd2a87
PS
980static int s390x_aes_ofb_init_key(EVP_CIPHER_CTX *ctx,
981 const unsigned char *key,
982 const unsigned char *ivec, int enc)
983{
984 S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
985 const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
986 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
987 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
55bd169f 988
dacd2a87
PS
989 memcpy(cctx->kmo.param.cv, iv, ivlen);
990 memcpy(cctx->kmo.param.k, key, keylen);
991 cctx->fc = S390X_AES_FC(keylen);
992 cctx->res = 0;
993 return 1;
994}
96530eea 995
96530eea 996static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
dacd2a87
PS
997 const unsigned char *in, size_t len)
998{
999 S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
1000 int n = cctx->res;
1001 int rem;
1002
1003 while (n && len) {
1004 *out = *in ^ cctx->kmo.param.cv[n];
1005 n = (n + 1) & 0xf;
1006 --len;
1007 ++in;
1008 ++out;
1009 }
1010
1011 rem = len & 0xf;
1012
1013 len &= ~(size_t)0xf;
1014 if (len) {
1015 s390x_kmo(in, len, out, cctx->fc, &cctx->kmo.param);
1016
1017 out += len;
1018 in += len;
1019 }
1020
1021 if (rem) {
1022 s390x_km(cctx->kmo.param.cv, 16, cctx->kmo.param.cv, cctx->fc,
1023 cctx->kmo.param.k);
1024
1025 while (rem--) {
1026 out[n] = in[n] ^ cctx->kmo.param.cv[n];
1027 ++n;
1028 }
1029 }
1030
1031 cctx->res = n;
1032 return 1;
1033}
96530eea 1034
74d38a86
PS
1035static int s390x_aes_cfb_init_key(EVP_CIPHER_CTX *ctx,
1036 const unsigned char *key,
1037 const unsigned char *ivec, int enc)
1038{
1039 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1040 const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
1041 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1042 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
1043
1044 cctx->fc = S390X_AES_FC(keylen);
1045 cctx->fc |= 16 << 24; /* 16 bytes cipher feedback */
1046 if (!enc)
1047 cctx->fc |= S390X_DECRYPT;
55bd169f 1048
74d38a86
PS
1049 cctx->res = 0;
1050 memcpy(cctx->kmf.param.cv, iv, ivlen);
1051 memcpy(cctx->kmf.param.k, key, keylen);
1052 return 1;
1053}
96530eea 1054
96530eea 1055static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
74d38a86
PS
1056 const unsigned char *in, size_t len)
1057{
1058 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1059 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1060 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
1061 int n = cctx->res;
1062 int rem;
1063 unsigned char tmp;
1064
1065 while (n && len) {
1066 tmp = *in;
1067 *out = cctx->kmf.param.cv[n] ^ tmp;
1068 cctx->kmf.param.cv[n] = enc ? *out : tmp;
1069 n = (n + 1) & 0xf;
1070 --len;
1071 ++in;
1072 ++out;
1073 }
1074
1075 rem = len & 0xf;
1076
1077 len &= ~(size_t)0xf;
1078 if (len) {
1079 s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
1080
1081 out += len;
1082 in += len;
1083 }
1084
1085 if (rem) {
1086 s390x_km(cctx->kmf.param.cv, 16, cctx->kmf.param.cv,
1087 S390X_AES_FC(keylen), cctx->kmf.param.k);
1088
1089 while (rem--) {
1090 tmp = in[n];
1091 out[n] = cctx->kmf.param.cv[n] ^ tmp;
1092 cctx->kmf.param.cv[n] = enc ? out[n] : tmp;
1093 ++n;
1094 }
1095 }
96530eea 1096
74d38a86
PS
1097 cctx->res = n;
1098 return 1;
1099}
1100
74d38a86
PS
1101static int s390x_aes_cfb8_init_key(EVP_CIPHER_CTX *ctx,
1102 const unsigned char *key,
1103 const unsigned char *ivec, int enc)
1104{
1105 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1106 const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
1107 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1108 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
1109
1110 cctx->fc = S390X_AES_FC(keylen);
1111 cctx->fc |= 1 << 24; /* 1 byte cipher feedback */
1112 if (!enc)
1113 cctx->fc |= S390X_DECRYPT;
96530eea 1114
74d38a86
PS
1115 memcpy(cctx->kmf.param.cv, iv, ivlen);
1116 memcpy(cctx->kmf.param.k, key, keylen);
1117 return 1;
1118}
55bd169f 1119
96530eea 1120static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
74d38a86
PS
1121 const unsigned char *in, size_t len)
1122{
1123 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1124
1125 s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
1126 return 1;
1127}
96530eea 1128
55bd169f
PS
1129# define s390x_aes_cfb1_init_key aes_init_key
1130
96530eea
PS
1131# define s390x_aes_cfb1_cipher aes_cfb1_cipher
1132static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1133 const unsigned char *in, size_t len);
1134
55bd169f
PS
1135# define S390X_AES_CTR_CTX EVP_AES_KEY
1136
1137# define s390x_aes_ctr_init_key aes_init_key
96530eea
PS
1138
1139# define s390x_aes_ctr_cipher aes_ctr_cipher
1140static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1141 const unsigned char *in, size_t len);
1142
bcf082d1 1143/* iv + padding length for iv lengths != 12 */
96530eea
PS
1144# define S390X_gcm_ivpadlen(i) ((((i) + 15) >> 4 << 4) + 16)
1145
5d2a6f4b
PS
1146/*-
1147 * Process additional authenticated data. Returns 0 on success. Code is
1148 * big-endian.
1149 */
96530eea
PS
1150static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
1151 size_t len)
1152{
1153 unsigned long long alen;
1154 int n, rem;
1155
1156 if (ctx->kma.param.tpcl)
1157 return -2;
1158
1159 alen = ctx->kma.param.taadl + len;
1160 if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
1161 return -1;
1162 ctx->kma.param.taadl = alen;
1163
1164 n = ctx->areslen;
1165 if (n) {
1166 while (n && len) {
1167 ctx->ares[n] = *aad;
1168 n = (n + 1) & 0xf;
1169 ++aad;
1170 --len;
1171 }
1172 /* ctx->ares contains a complete block if offset has wrapped around */
1173 if (!n) {
1174 s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
1175 ctx->fc |= S390X_KMA_HS;
1176 }
1177 ctx->areslen = n;
1178 }
1179
1180 rem = len & 0xf;
1181
25868993 1182 len &= ~(size_t)0xf;
96530eea
PS
1183 if (len) {
1184 s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
1185 aad += len;
1186 ctx->fc |= S390X_KMA_HS;
1187 }
1188
1189 if (rem) {
1190 ctx->areslen = rem;
1191
1192 do {
1193 --rem;
1194 ctx->ares[rem] = aad[rem];
1195 } while (rem);
1196 }
1197 return 0;
1198}
1199
5d2a6f4b
PS
1200/*-
1201 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
1202 * success. Code is big-endian.
1203 */
96530eea
PS
1204static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
1205 unsigned char *out, size_t len)
1206{
1207 const unsigned char *inptr;
1208 unsigned long long mlen;
1209 union {
1210 unsigned int w[4];
1211 unsigned char b[16];
1212 } buf;
1213 size_t inlen;
1214 int n, rem, i;
1215
1216 mlen = ctx->kma.param.tpcl + len;
1217 if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
1218 return -1;
1219 ctx->kma.param.tpcl = mlen;
1220
1221 n = ctx->mreslen;
1222 if (n) {
1223 inptr = in;
1224 inlen = len;
1225 while (n && inlen) {
1226 ctx->mres[n] = *inptr;
1227 n = (n + 1) & 0xf;
1228 ++inptr;
1229 --inlen;
1230 }
1231 /* ctx->mres contains a complete block if offset has wrapped around */
1232 if (!n) {
1233 s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
1234 ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
1235 ctx->fc |= S390X_KMA_HS;
1236 ctx->areslen = 0;
1237
1238 /* previous call already encrypted/decrypted its remainder,
1239 * see comment below */
1240 n = ctx->mreslen;
1241 while (n) {
1242 *out = buf.b[n];
1243 n = (n + 1) & 0xf;
1244 ++out;
1245 ++in;
1246 --len;
1247 }
1248 ctx->mreslen = 0;
1249 }
1250 }
1251
1252 rem = len & 0xf;
1253
25868993 1254 len &= ~(size_t)0xf;
96530eea
PS
1255 if (len) {
1256 s390x_kma(ctx->ares, ctx->areslen, in, len, out,
1257 ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
1258 in += len;
1259 out += len;
1260 ctx->fc |= S390X_KMA_HS;
1261 ctx->areslen = 0;
1262 }
1263
1264 /*-
1265 * If there is a remainder, it has to be saved such that it can be
1266 * processed by kma later. However, we also have to do the for-now
1267 * unauthenticated encryption/decryption part here and now...
1268 */
1269 if (rem) {
1270 if (!ctx->mreslen) {
1271 buf.w[0] = ctx->kma.param.j0.w[0];
1272 buf.w[1] = ctx->kma.param.j0.w[1];
1273 buf.w[2] = ctx->kma.param.j0.w[2];
1274 buf.w[3] = ctx->kma.param.cv.w + 1;
1275 s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);
1276 }
1277
1278 n = ctx->mreslen;
1279 for (i = 0; i < rem; i++) {
1280 ctx->mres[n + i] = in[i];
1281 out[i] = in[i] ^ ctx->kres[n + i];
1282 }
1283
1284 ctx->mreslen += rem;
1285 }
1286 return 0;
1287}
1288
5d2a6f4b
PS
1289/*-
1290 * Initialize context structure. Code is big-endian.
1291 */
96530eea
PS
1292static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx,
1293 const unsigned char *iv)
1294{
1295 ctx->kma.param.t.g[0] = 0;
1296 ctx->kma.param.t.g[1] = 0;
1297 ctx->kma.param.tpcl = 0;
1298 ctx->kma.param.taadl = 0;
1299 ctx->mreslen = 0;
1300 ctx->areslen = 0;
1301 ctx->kreslen = 0;
1302
1303 if (ctx->ivlen == 12) {
1304 memcpy(&ctx->kma.param.j0, iv, ctx->ivlen);
1305 ctx->kma.param.j0.w[3] = 1;
1306 ctx->kma.param.cv.w = 1;
1307 } else {
1308 /* ctx->iv has the right size and is already padded. */
1309 memcpy(ctx->iv, iv, ctx->ivlen);
1310 s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
1311 ctx->fc, &ctx->kma.param);
1312 ctx->fc |= S390X_KMA_HS;
1313
1314 ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
1315 ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
1316 ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
1317 ctx->kma.param.t.g[0] = 0;
1318 ctx->kma.param.t.g[1] = 0;
1319 }
1320}
1321
5d2a6f4b
PS
1322/*-
1323 * Performs various operations on the context structure depending on control
1324 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
1325 * Code is big-endian.
1326 */
96530eea
PS
1327static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
1328{
1329 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
1330 S390X_AES_GCM_CTX *gctx_out;
1331 EVP_CIPHER_CTX *out;
1332 unsigned char *buf, *iv;
1333 int ivlen, enc, len;
1334
1335 switch (type) {
1336 case EVP_CTRL_INIT:
1337 ivlen = EVP_CIPHER_CTX_iv_length(c);
1338 iv = EVP_CIPHER_CTX_iv_noconst(c);
1339 gctx->key_set = 0;
1340 gctx->iv_set = 0;
1341 gctx->ivlen = ivlen;
1342 gctx->iv = iv;
1343 gctx->taglen = -1;
1344 gctx->iv_gen = 0;
1345 gctx->tls_aad_len = -1;
1346 return 1;
1347
1348 case EVP_CTRL_AEAD_SET_IVLEN:
1349 if (arg <= 0)
1350 return 0;
1351
1352 if (arg != 12) {
1353 iv = EVP_CIPHER_CTX_iv_noconst(c);
1354 len = S390X_gcm_ivpadlen(arg);
1355
1356 /* Allocate memory for iv if needed. */
1357 if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
1358 if (gctx->iv != iv)
1359 OPENSSL_free(gctx->iv);
1360
cdb10bae
RS
1361 if ((gctx->iv = OPENSSL_malloc(len)) == NULL) {
1362 EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
96530eea 1363 return 0;
cdb10bae 1364 }
96530eea
PS
1365 }
1366 /* Add padding. */
1367 memset(gctx->iv + arg, 0, len - arg - 8);
1368 *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;
1369 }
1370 gctx->ivlen = arg;
1371 return 1;
1372
1373 case EVP_CTRL_AEAD_SET_TAG:
1374 buf = EVP_CIPHER_CTX_buf_noconst(c);
1375 enc = EVP_CIPHER_CTX_encrypting(c);
1376 if (arg <= 0 || arg > 16 || enc)
1377 return 0;
1378
1379 memcpy(buf, ptr, arg);
1380 gctx->taglen = arg;
1381 return 1;
1382
1383 case EVP_CTRL_AEAD_GET_TAG:
1384 enc = EVP_CIPHER_CTX_encrypting(c);
1385 if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
1386 return 0;
1387
1388 memcpy(ptr, gctx->kma.param.t.b, arg);
1389 return 1;
1390
1391 case EVP_CTRL_GCM_SET_IV_FIXED:
1392 /* Special case: -1 length restores whole iv */
1393 if (arg == -1) {
1394 memcpy(gctx->iv, ptr, gctx->ivlen);
1395 gctx->iv_gen = 1;
1396 return 1;
1397 }
1398 /*
1399 * Fixed field must be at least 4 bytes and invocation field at least
1400 * 8.
1401 */
1402 if ((arg < 4) || (gctx->ivlen - arg) < 8)
1403 return 0;
1404
1405 if (arg)
1406 memcpy(gctx->iv, ptr, arg);
1407
1408 enc = EVP_CIPHER_CTX_encrypting(c);
16cfc2c9
KR
1409 if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
1410 return 0;
96530eea
PS
1411
1412 gctx->iv_gen = 1;
1413 return 1;
1414
1415 case EVP_CTRL_GCM_IV_GEN:
1416 if (gctx->iv_gen == 0 || gctx->key_set == 0)
1417 return 0;
1418
1419 s390x_aes_gcm_setiv(gctx, gctx->iv);
1420
1421 if (arg <= 0 || arg > gctx->ivlen)
1422 arg = gctx->ivlen;
1423
1424 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
1425 /*
1426 * Invocation field will be at least 8 bytes in size and so no need
1427 * to check wrap around or increment more than last 8 bytes.
1428 */
03a5e5ae 1429 ctr64_inc(gctx->iv + gctx->ivlen - 8);
96530eea
PS
1430 gctx->iv_set = 1;
1431 return 1;
1432
1433 case EVP_CTRL_GCM_SET_IV_INV:
1434 enc = EVP_CIPHER_CTX_encrypting(c);
1435 if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
1436 return 0;
1437
1438 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
1439 s390x_aes_gcm_setiv(gctx, gctx->iv);
1440 gctx->iv_set = 1;
1441 return 1;
1442
1443 case EVP_CTRL_AEAD_TLS1_AAD:
1444 /* Save the aad for later use. */
1445 if (arg != EVP_AEAD_TLS1_AAD_LEN)
1446 return 0;
1447
1448 buf = EVP_CIPHER_CTX_buf_noconst(c);
1449 memcpy(buf, ptr, arg);
1450 gctx->tls_aad_len = arg;
d6b34570 1451 gctx->tls_enc_records = 0;
96530eea
PS
1452
1453 len = buf[arg - 2] << 8 | buf[arg - 1];
1454 /* Correct length for explicit iv. */
1455 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
1456 return 0;
1457 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
1458
1459 /* If decrypting correct for tag too. */
1460 enc = EVP_CIPHER_CTX_encrypting(c);
1461 if (!enc) {
1462 if (len < EVP_GCM_TLS_TAG_LEN)
1463 return 0;
1464 len -= EVP_GCM_TLS_TAG_LEN;
1465 }
1466 buf[arg - 2] = len >> 8;
1467 buf[arg - 1] = len & 0xff;
1468 /* Extra padding: tag appended to record. */
1469 return EVP_GCM_TLS_TAG_LEN;
1470
1471 case EVP_CTRL_COPY:
1472 out = ptr;
1473 gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);
1474 iv = EVP_CIPHER_CTX_iv_noconst(c);
1475
1476 if (gctx->iv == iv) {
1477 gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
1478 } else {
1479 len = S390X_gcm_ivpadlen(gctx->ivlen);
1480
cdb10bae
RS
1481 if ((gctx_out->iv = OPENSSL_malloc(len)) == NULL) {
1482 EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
96530eea 1483 return 0;
cdb10bae 1484 }
96530eea
PS
1485
1486 memcpy(gctx_out->iv, gctx->iv, len);
1487 }
1488 return 1;
1489
1490 default:
1491 return -1;
1492 }
1493}
1494
5d2a6f4b
PS
1495/*-
1496 * Set key and/or iv. Returns 1 on success. Otherwise 0 is returned.
1497 */
96530eea
PS
1498static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
1499 const unsigned char *key,
1500 const unsigned char *iv, int enc)
1501{
1502 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1503 int keylen;
1504
1505 if (iv == NULL && key == NULL)
1506 return 1;
1507
1508 if (key != NULL) {
1509 keylen = EVP_CIPHER_CTX_key_length(ctx);
1510 memcpy(&gctx->kma.param.k, key, keylen);
1511
8eb399fb 1512 gctx->fc = S390X_AES_FC(keylen);
96530eea
PS
1513 if (!enc)
1514 gctx->fc |= S390X_DECRYPT;
1515
1516 if (iv == NULL && gctx->iv_set)
1517 iv = gctx->iv;
1518
1519 if (iv != NULL) {
1520 s390x_aes_gcm_setiv(gctx, iv);
1521 gctx->iv_set = 1;
1522 }
1523 gctx->key_set = 1;
1524 } else {
1525 if (gctx->key_set)
1526 s390x_aes_gcm_setiv(gctx, iv);
1527 else
1528 memcpy(gctx->iv, iv, gctx->ivlen);
1529
1530 gctx->iv_set = 1;
1531 gctx->iv_gen = 0;
1532 }
1533 return 1;
1534}
1535
5d2a6f4b
PS
1536/*-
1537 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1538 * if successful. Otherwise -1 is returned. Code is big-endian.
1539 */
96530eea
PS
1540static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1541 const unsigned char *in, size_t len)
1542{
1543 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1544 const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1545 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
1546 int rv = -1;
1547
1548 if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
1549 return -1;
1550
d6b34570
P
1551 /*
1552 * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
1553 * Requirements from SP 800-38D". The requirements is for one party to the
1554 * communication to fail after 2^64 - 1 keys. We do this on the encrypting
1555 * side only.
1556 */
1557 if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
1558 EVPerr(EVP_F_S390X_AES_GCM_TLS_CIPHER, EVP_R_TOO_MANY_RECORDS);
1559 goto err;
1560 }
1561
96530eea
PS
1562 if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN
1563 : EVP_CTRL_GCM_SET_IV_INV,
1564 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
1565 goto err;
1566
1567 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1568 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1569 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
1570
1571 gctx->kma.param.taadl = gctx->tls_aad_len << 3;
1572 gctx->kma.param.tpcl = len << 3;
1573 s390x_kma(buf, gctx->tls_aad_len, in, len, out,
1574 gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
1575
1576 if (enc) {
1577 memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
1578 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
1579 } else {
1580 if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
1581 EVP_GCM_TLS_TAG_LEN)) {
1582 OPENSSL_cleanse(out, len);
1583 goto err;
1584 }
1585 rv = len;
1586 }
1587err:
1588 gctx->iv_set = 0;
1589 gctx->tls_aad_len = -1;
1590 return rv;
1591}
1592
5d2a6f4b
PS
1593/*-
1594 * Called from EVP layer to initialize context, process additional
1595 * authenticated data, en/de-crypt plain/cipher-text and authenticate
1596 * ciphertext or process a TLS packet, depending on context. Returns bytes
1597 * written on success. Otherwise -1 is returned. Code is big-endian.
1598 */
96530eea
PS
1599static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1600 const unsigned char *in, size_t len)
1601{
1602 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1603 unsigned char *buf, tmp[16];
1604 int enc;
1605
1606 if (!gctx->key_set)
1607 return -1;
1608
1609 if (gctx->tls_aad_len >= 0)
1610 return s390x_aes_gcm_tls_cipher(ctx, out, in, len);
1611
1612 if (!gctx->iv_set)
1613 return -1;
1614
1615 if (in != NULL) {
1616 if (out == NULL) {
1617 if (s390x_aes_gcm_aad(gctx, in, len))
1618 return -1;
1619 } else {
1620 if (s390x_aes_gcm(gctx, in, out, len))
1621 return -1;
1622 }
1623 return len;
1624 } else {
1625 gctx->kma.param.taadl <<= 3;
1626 gctx->kma.param.tpcl <<= 3;
1627 s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
1628 gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
1629 /* recall that we already did en-/decrypt gctx->mres
1630 * and returned it to caller... */
1631 OPENSSL_cleanse(tmp, gctx->mreslen);
1632 gctx->iv_set = 0;
1633
1634 enc = EVP_CIPHER_CTX_encrypting(ctx);
1635 if (enc) {
1636 gctx->taglen = 16;
1637 } else {
1638 if (gctx->taglen < 0)
1639 return -1;
1640
1641 buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1642 if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
1643 return -1;
1644 }
1645 return 0;
1646 }
1647}
1648
1649static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
1650{
1651 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
1652 const unsigned char *iv;
1653
1654 if (gctx == NULL)
1655 return 0;
1656
1657 iv = EVP_CIPHER_CTX_iv(c);
1658 if (iv != gctx->iv)
1659 OPENSSL_free(gctx->iv);
1660
1661 OPENSSL_cleanse(gctx, sizeof(*gctx));
1662 return 1;
1663}
1664
1665# define S390X_AES_XTS_CTX EVP_AES_XTS_CTX
96530eea
PS
1666
1667# define s390x_aes_xts_init_key aes_xts_init_key
1668static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
1669 const unsigned char *key,
1670 const unsigned char *iv, int enc);
1671# define s390x_aes_xts_cipher aes_xts_cipher
1672static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1673 const unsigned char *in, size_t len);
1674# define s390x_aes_xts_ctrl aes_xts_ctrl
1675static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
1676# define s390x_aes_xts_cleanup aes_xts_cleanup
1677
39f5b069
PS
1678/*-
1679 * Set nonce and length fields. Code is big-endian.
1680 */
1681static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
1682 const unsigned char *nonce,
1683 size_t mlen)
1684{
1685 ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
1686 ctx->aes.ccm.nonce.g[1] = mlen;
1687 memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);
1688}
1689
1690/*-
1691 * Process additional authenticated data. Code is big-endian.
1692 */
1693static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,
1694 size_t alen)
1695{
1696 unsigned char *ptr;
1697 int i, rem;
1698
1699 if (!alen)
1700 return;
1701
1702 ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;
1703
1704 /* Suppress 'type-punned pointer dereference' warning. */
1705 ptr = ctx->aes.ccm.buf.b;
1706
1707 if (alen < ((1 << 16) - (1 << 8))) {
1708 *(uint16_t *)ptr = alen;
1709 i = 2;
1710 } else if (sizeof(alen) == 8
1711 && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) {
1712 *(uint16_t *)ptr = 0xffff;
1713 *(uint64_t *)(ptr + 2) = alen;
1714 i = 10;
1715 } else {
1716 *(uint16_t *)ptr = 0xfffe;
1717 *(uint32_t *)(ptr + 2) = alen;
1718 i = 6;
1719 }
1720
1721 while (i < 16 && alen) {
1722 ctx->aes.ccm.buf.b[i] = *aad;
1723 ++aad;
1724 --alen;
1725 ++i;
1726 }
1727 while (i < 16) {
1728 ctx->aes.ccm.buf.b[i] = 0;
1729 ++i;
1730 }
1731
1732 ctx->aes.ccm.kmac_param.icv.g[0] = 0;
1733 ctx->aes.ccm.kmac_param.icv.g[1] = 0;
1734 s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc,
1735 &ctx->aes.ccm.kmac_param);
1736 ctx->aes.ccm.blocks += 2;
1737
1738 rem = alen & 0xf;
25868993 1739 alen &= ~(size_t)0xf;
39f5b069
PS
1740 if (alen) {
1741 s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1742 ctx->aes.ccm.blocks += alen >> 4;
1743 aad += alen;
1744 }
1745 if (rem) {
1746 for (i = 0; i < rem; i++)
1747 ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i];
1748
1749 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1750 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1751 ctx->aes.ccm.kmac_param.k);
1752 ctx->aes.ccm.blocks++;
1753 }
1754}
1755
1756/*-
1757 * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for
1758 * success.
1759 */
1760static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in,
1761 unsigned char *out, size_t len, int enc)
1762{
1763 size_t n, rem;
1764 unsigned int i, l, num;
1765 unsigned char flags;
1766
1767 flags = ctx->aes.ccm.nonce.b[0];
1768 if (!(flags & S390X_CCM_AAD_FLAG)) {
1769 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b,
1770 ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k);
1771 ctx->aes.ccm.blocks++;
1772 }
1773 l = flags & 0x7;
1774 ctx->aes.ccm.nonce.b[0] = l;
1775
1776 /*-
1777 * Reconstruct length from encoded length field
1778 * and initialize it with counter value.
1779 */
1780 n = 0;
1781 for (i = 15 - l; i < 15; i++) {
1782 n |= ctx->aes.ccm.nonce.b[i];
1783 ctx->aes.ccm.nonce.b[i] = 0;
1784 n <<= 8;
1785 }
1786 n |= ctx->aes.ccm.nonce.b[15];
1787 ctx->aes.ccm.nonce.b[15] = 1;
1788
1789 if (n != len)
1790 return -1; /* length mismatch */
1791
1792 if (enc) {
1793 /* Two operations per block plus one for tag encryption */
1794 ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1;
1795 if (ctx->aes.ccm.blocks > (1ULL << 61))
1796 return -2; /* too much data */
1797 }
1798
1799 num = 0;
1800 rem = len & 0xf;
25868993 1801 len &= ~(size_t)0xf;
39f5b069
PS
1802
1803 if (enc) {
1804 /* mac-then-encrypt */
1805 if (len)
1806 s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1807 if (rem) {
1808 for (i = 0; i < rem; i++)
1809 ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i];
1810
1811 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1812 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1813 ctx->aes.ccm.kmac_param.k);
1814 }
1815
1816 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
1817 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
1818 &num, (ctr128_f)AES_ctr32_encrypt);
1819 } else {
1820 /* decrypt-then-mac */
1821 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
1822 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
1823 &num, (ctr128_f)AES_ctr32_encrypt);
1824
1825 if (len)
1826 s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1827 if (rem) {
1828 for (i = 0; i < rem; i++)
1829 ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i];
1830
1831 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1832 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1833 ctx->aes.ccm.kmac_param.k);
1834 }
1835 }
1836 /* encrypt tag */
1837 for (i = 15 - l; i < 16; i++)
1838 ctx->aes.ccm.nonce.b[i] = 0;
1839
1840 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc,
1841 ctx->aes.ccm.kmac_param.k);
1842 ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0];
1843 ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1];
1844
1845 ctx->aes.ccm.nonce.b[0] = flags; /* restore flags field */
1846 return 0;
1847}
1848
1849/*-
1850 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1851 * if successful. Otherwise -1 is returned.
1852 */
1853static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1854 const unsigned char *in, size_t len)
1855{
1856 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
1857 unsigned char *ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
1858 unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1859 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
1860
1861 if (out != in
1862 || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m))
1863 return -1;
1864
1865 if (enc) {
1866 /* Set explicit iv (sequence number). */
1867 memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN);
1868 }
1869
1870 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
1871 /*-
1872 * Get explicit iv (sequence number). We already have fixed iv
1873 * (server/client_write_iv) here.
1874 */
1875 memcpy(ivec + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN);
1876 s390x_aes_ccm_setiv(cctx, ivec, len);
1877
1878 /* Process aad (sequence number|type|version|length) */
1879 s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len);
1880
1881 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
1882 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
96530eea 1883
39f5b069
PS
1884 if (enc) {
1885 if (s390x_aes_ccm(cctx, in, out, len, enc))
1886 return -1;
1887
1888 memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
1889 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
1890 } else {
1891 if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
1892 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len,
1893 cctx->aes.ccm.m))
1894 return len;
1895 }
1896
1897 OPENSSL_cleanse(out, len);
1898 return -1;
1899 }
1900}
1901
1902/*-
1903 * Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is
1904 * returned.
1905 */
96530eea
PS
1906static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
1907 const unsigned char *key,
39f5b069
PS
1908 const unsigned char *iv, int enc)
1909{
1910 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
1911 unsigned char *ivec;
1912 int keylen;
1913
1914 if (iv == NULL && key == NULL)
1915 return 1;
1916
1917 if (key != NULL) {
1918 keylen = EVP_CIPHER_CTX_key_length(ctx);
8eb399fb 1919 cctx->aes.ccm.fc = S390X_AES_FC(keylen);
39f5b069
PS
1920 memcpy(cctx->aes.ccm.kmac_param.k, key, keylen);
1921
1922 /* Store encoded m and l. */
1923 cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7)
1924 | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3;
1925 memset(cctx->aes.ccm.nonce.b + 1, 0,
1926 sizeof(cctx->aes.ccm.nonce.b));
1927 cctx->aes.ccm.blocks = 0;
1928
1929 cctx->aes.ccm.key_set = 1;
1930 }
1931
1932 if (iv != NULL) {
1933 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
1934 memcpy(ivec, iv, 15 - cctx->aes.ccm.l);
1935
1936 cctx->aes.ccm.iv_set = 1;
1937 }
1938
1939 return 1;
1940}
1941
1942/*-
1943 * Called from EVP layer to initialize context, process additional
1944 * authenticated data, en/de-crypt plain/cipher-text and authenticate
1945 * plaintext or process a TLS packet, depending on context. Returns bytes
1946 * written on success. Otherwise -1 is returned.
1947 */
96530eea 1948static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
39f5b069
PS
1949 const unsigned char *in, size_t len)
1950{
1951 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
1952 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
1953 int rv;
1954 unsigned char *buf, *ivec;
1955
1956 if (!cctx->aes.ccm.key_set)
1957 return -1;
1958
1959 if (cctx->aes.ccm.tls_aad_len >= 0)
1960 return s390x_aes_ccm_tls_cipher(ctx, out, in, len);
1961
1962 /*-
1963 * Final(): Does not return any data. Recall that ccm is mac-then-encrypt
1964 * so integrity must be checked already at Update() i.e., before
1965 * potentially corrupted data is output.
1966 */
1967 if (in == NULL && out != NULL)
1968 return 0;
1969
1970 if (!cctx->aes.ccm.iv_set)
1971 return -1;
1972
39f5b069
PS
1973 if (out == NULL) {
1974 /* Update(): Pass message length. */
1975 if (in == NULL) {
1976 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
1977 s390x_aes_ccm_setiv(cctx, ivec, len);
1978
1979 cctx->aes.ccm.len_set = 1;
1980 return len;
1981 }
1982
1983 /* Update(): Process aad. */
1984 if (!cctx->aes.ccm.len_set && len)
1985 return -1;
1986
1987 s390x_aes_ccm_aad(cctx, in, len);
1988 return len;
1989 }
1990
887e22dd
PS
1991 /* The tag must be set before actually decrypting data */
1992 if (!enc && !cctx->aes.ccm.tag_set)
1993 return -1;
1994
39f5b069
PS
1995 /* Update(): Process message. */
1996
1997 if (!cctx->aes.ccm.len_set) {
1998 /*-
46d08509 1999 * In case message length was not previously set explicitly via
39f5b069
PS
2000 * Update(), set it now.
2001 */
2002 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
2003 s390x_aes_ccm_setiv(cctx, ivec, len);
2004
2005 cctx->aes.ccm.len_set = 1;
2006 }
2007
2008 if (enc) {
2009 if (s390x_aes_ccm(cctx, in, out, len, enc))
2010 return -1;
2011
2012 cctx->aes.ccm.tag_set = 1;
2013 return len;
2014 } else {
2015 rv = -1;
2016
2017 if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
2018 buf = EVP_CIPHER_CTX_buf_noconst(ctx);
2019 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf,
2020 cctx->aes.ccm.m))
2021 rv = len;
2022 }
2023
2024 if (rv == -1)
2025 OPENSSL_cleanse(out, len);
2026
2027 cctx->aes.ccm.iv_set = 0;
2028 cctx->aes.ccm.tag_set = 0;
2029 cctx->aes.ccm.len_set = 0;
2030 return rv;
2031 }
2032}
2033
2034/*-
2035 * Performs various operations on the context structure depending on control
2036 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
2037 * Code is big-endian.
2038 */
static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c);
    unsigned char *buf, *iv;
    int enc, len;

    switch (type) {
    case EVP_CTRL_INIT:
        /* Reset all CCM state to the documented defaults: L=8, tag=12. */
        cctx->aes.ccm.key_set = 0;
        cctx->aes.ccm.iv_set = 0;
        cctx->aes.ccm.l = 8;
        cctx->aes.ccm.m = 12;
        cctx->aes.ccm.tag_set = 0;
        cctx->aes.ccm.len_set = 0;
        cctx->aes.ccm.tls_aad_len = -1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;

        /* Save the aad for later use. */
        buf = EVP_CIPHER_CTX_buf_noconst(c);
        memcpy(buf, ptr, arg);
        cctx->aes.ccm.tls_aad_len = arg;

        /* Last two AAD bytes hold the TLS record length (big-endian). */
        len = buf[arg - 2] << 8 | buf[arg - 1];
        if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
            return 0;

        /* Correct length for explicit iv. */
        len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;

        enc = EVP_CIPHER_CTX_encrypting(c);
        if (!enc) {
            if (len < cctx->aes.ccm.m)
                return 0;

            /* Correct length for tag. */
            len -= cctx->aes.ccm.m;
        }

        /* Write the corrected plaintext length back into the saved AAD. */
        buf[arg - 2] = len >> 8;
        buf[arg - 1] = len & 0xff;

        /* Extra padding: tag appended to record. */
        return cctx->aes.ccm.m;

    case EVP_CTRL_CCM_SET_IV_FIXED:
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
            return 0;

        /* Copy to first part of the iv. */
        iv = EVP_CIPHER_CTX_iv_noconst(c);
        memcpy(iv, ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        /* CCM nonce length N and length field L are related by N = 15 - L. */
        arg = 15 - arg;
        /* fall-through */

    case EVP_CTRL_CCM_SET_L:
        /* RFC 3610 restricts the length field L to 2..8 octets. */
        if (arg < 2 || arg > 8)
            return 0;

        cctx->aes.ccm.l = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        /* Tag length must be an even value in 4..16. */
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;

        /* An expected-tag value may only be supplied when decrypting. */
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (enc && ptr)
            return 0;

        if (ptr) {
            cctx->aes.ccm.tag_set = 1;
            buf = EVP_CIPHER_CTX_buf_noconst(c);
            memcpy(buf, ptr, arg);
        }

        cctx->aes.ccm.m = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        /* Tag retrieval is only valid after an encrypt produced one. */
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (!enc || !cctx->aes.ccm.tag_set)
            return 0;

        if(arg < cctx->aes.ccm.m)
            return 0;

        /* Tag is read out of the KMAC ICV; reading it consumes the state. */
        memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
        cctx->aes.ccm.tag_set = 0;
        cctx->aes.ccm.iv_set = 0;
        cctx->aes.ccm.len_set = 0;
        return 1;

    case EVP_CTRL_COPY:
        return 1;

    default:
        return -1;
    }
}
2145
96530eea
PS
2146# define s390x_aes_ccm_cleanup aes_ccm_cleanup
2147
2148# ifndef OPENSSL_NO_OCB
2149# define S390X_AES_OCB_CTX EVP_AES_OCB_CTX
96530eea
PS
2150
2151# define s390x_aes_ocb_init_key aes_ocb_init_key
2152static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2153 const unsigned char *iv, int enc);
2154# define s390x_aes_ocb_cipher aes_ocb_cipher
2155static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2156 const unsigned char *in, size_t len);
2157# define s390x_aes_ocb_cleanup aes_ocb_cleanup
2158static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *);
2159# define s390x_aes_ocb_ctrl aes_ocb_ctrl
2160static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
2161# endif
2162
e74be3d4
RL
2163# ifndef OPENSSL_NO_SIV
2164# define S390X_AES_SIV_CTX EVP_AES_SIV_CTX
e74be3d4
RL
2165
2166# define s390x_aes_siv_init_key aes_siv_init_key
2167# define s390x_aes_siv_cipher aes_siv_cipher
2168# define s390x_aes_siv_cleanup aes_siv_cleanup
2169# define s390x_aes_siv_ctrl aes_siv_ctrl
2170# endif
2171
96530eea
PS
/*
 * Emit two EVP_CIPHER tables for one generic AES mode/key length: an
 * s390x-accelerated variant and the portable software variant, plus the
 * public EVP_aes_<keylen>_<mode>() getter that selects between them at run
 * time via the S390X_aes_*_CAPABLE test.
 */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode, \
                              MODE,flags) \
static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
    nid##_##keylen##_##nmode,blocksize, \
    keylen / 8, \
    ivlen, \
    flags | EVP_CIPH_##MODE##_MODE, \
    s390x_aes_##mode##_init_key, \
    s390x_aes_##mode##_cipher, \
    NULL, \
    sizeof(S390X_AES_##MODE##_CTX), \
    NULL, \
    NULL, \
    NULL, \
    NULL \
}; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
    nid##_##keylen##_##nmode, \
    blocksize, \
    keylen / 8, \
    ivlen, \
    flags | EVP_CIPH_##MODE##_MODE, \
    aes_init_key, \
    aes_##mode##_cipher, \
    NULL, \
    sizeof(EVP_AES_KEY), \
    NULL, \
    NULL, \
    NULL, \
    NULL \
}; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ \
    return S390X_aes_##keylen##_##mode##_CAPABLE ? \
           &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
}
2208
/*
 * As BLOCK_CIPHER_generic above, but for "custom" modes (GCM/CCM/XTS/OCB/SIV)
 * that need a cleanup routine and a ctrl handler.  XTS and SIV use double
 * length keys, hence the conditional doubling of the key length field.
 */
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\
static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
    nid##_##keylen##_##mode, \
    blocksize, \
    (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
    ivlen, \
    flags | EVP_CIPH_##MODE##_MODE, \
    s390x_aes_##mode##_init_key, \
    s390x_aes_##mode##_cipher, \
    s390x_aes_##mode##_cleanup, \
    sizeof(S390X_AES_##MODE##_CTX), \
    NULL, \
    NULL, \
    s390x_aes_##mode##_ctrl, \
    NULL \
}; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
    nid##_##keylen##_##mode,blocksize, \
    (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
    ivlen, \
    flags | EVP_CIPH_##MODE##_MODE, \
    aes_##mode##_init_key, \
    aes_##mode##_cipher, \
    aes_##mode##_cleanup, \
    sizeof(EVP_AES_##MODE##_CTX), \
    NULL, \
    NULL, \
    aes_##mode##_ctrl, \
    NULL \
}; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ \
    return S390X_aes_##keylen##_##mode##_CAPABLE ? \
           &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
}
2244
5158c763 2245#else
17f121de 2246
/*
 * Non-s390x build: emit a single software EVP_CIPHER table for one generic
 * AES mode/key length and the EVP_aes_<keylen>_<mode>() getter for it.
 */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_init_key, \
        aes_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }
d1fff483 2258
/*
 * Non-s390x build: emit one software EVP_CIPHER table for a "custom" mode
 * (GCM/CCM/XTS/OCB/SIV) with mode-specific init/cipher/cleanup/ctrl hooks.
 * XTS and SIV carry two keys, so their key length field is doubled.
 */
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }
9575d1a9 2272
5158c763 2273#endif
9575d1a9 2274
/*
 * Instantiate the whole family of generic AES modes (CBC, ECB, OFB, CFB128,
 * CFB1, CFB8, CTR) for one key length in a single macro invocation.
 */
#define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \
        BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
        BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
d1fff483
AP
2283
/*
 * Key schedule setup for the generic AES modes.  Picks the best available
 * implementation (HWAES > BSAES > VPAES > portable AES_*) at run time and
 * records the matching block function and, where one exists, an accelerated
 * CBC or CTR stream function in the EVP_AES_KEY.  ECB/CBC decryption needs
 * the decrypt schedule; every other mode/direction uses the encrypt one.
 * Returns 1 on success, 0 (with an EVP error raised) on failure.
 */
static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                        const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        /* Decrypt direction of the two block modes: decrypt key schedule. */
#ifdef HWAES_CAPABLE
        if (HWAES_CAPABLE) {
            ret = HWAES_set_decrypt_key(key,
                                        EVP_CIPHER_CTX_key_length(ctx) * 8,
                                        &dat->ks.ks);
            dat->block = (block128_f) HWAES_decrypt;
            dat->stream.cbc = NULL;
# ifdef HWAES_cbc_encrypt
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
# endif
        } else
#endif
#ifdef BSAES_CAPABLE
        /* BSAES only provides an accelerated CBC path here. */
        if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
            ret = AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &dat->ks.ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
        } else
#endif
#ifdef VPAES_CAPABLE
        if (VPAES_CAPABLE) {
            ret = vpaes_set_decrypt_key(key,
                                        EVP_CIPHER_CTX_key_length(ctx) * 8,
                                        &dat->ks.ks);
            dat->block = (block128_f) vpaes_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) vpaes_cbc_encrypt : NULL;
        } else
#endif
        {
            /* Portable fallback. */
            ret = AES_set_decrypt_key(key,
                                      EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &dat->ks.ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) AES_cbc_encrypt : NULL;
        }
    } else
        /* Encrypt direction, or any stream-like mode: encrypt key schedule. */
#ifdef HWAES_CAPABLE
    if (HWAES_CAPABLE) {
        ret = HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) HWAES_encrypt;
        dat->stream.cbc = NULL;
# ifdef HWAES_cbc_encrypt
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
        else
# endif
# ifdef HWAES_ctr32_encrypt_blocks
        if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
        else
# endif
            (void)0;            /* terminate potentially open 'else' */
    } else
#endif
#ifdef BSAES_CAPABLE
    /* BSAES only provides an accelerated CTR path here. */
    if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
        ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &dat->ks.ks);
        dat->block = (block128_f) AES_encrypt;
        dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
    } else
#endif
#ifdef VPAES_CAPABLE
    if (VPAES_CAPABLE) {
        ret = vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) vpaes_encrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) vpaes_cbc_encrypt : NULL;
    } else
#endif
    {
        /* Portable fallback. */
        ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &dat->ks.ks);
        dat->block = (block128_f) AES_encrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) AES_cbc_encrypt : NULL;
#ifdef AES_CTR_ASM
        if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
#endif
    }

    if (ret < 0) {
        EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}
d1fff483 2388
0f113f3e
MC
2389static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2390 const unsigned char *in, size_t len)
17f121de 2391{
6435f0f6 2392 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
8ca28da0 2393
0f113f3e 2394 if (dat->stream.cbc)
6435f0f6
RL
2395 (*dat->stream.cbc) (in, out, len, &dat->ks,
2396 EVP_CIPHER_CTX_iv_noconst(ctx),
2397 EVP_CIPHER_CTX_encrypting(ctx));
2398 else if (EVP_CIPHER_CTX_encrypting(ctx))
2399 CRYPTO_cbc128_encrypt(in, out, len, &dat->ks,
2400 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
0f113f3e 2401 else
6435f0f6
RL
2402 CRYPTO_cbc128_decrypt(in, out, len, &dat->ks,
2403 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
17f121de 2404
0f113f3e 2405 return 1;
17f121de
AP
2406}
2407
0f113f3e
MC
2408static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2409 const unsigned char *in, size_t len)
17f121de 2410{
6435f0f6 2411 size_t bl = EVP_CIPHER_CTX_block_size(ctx);
0f113f3e 2412 size_t i;
6435f0f6 2413 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
17f121de 2414
0f113f3e
MC
2415 if (len < bl)
2416 return 1;
17f121de 2417
0f113f3e
MC
2418 for (i = 0, len -= bl; i <= len; i += bl)
2419 (*dat->block) (in + i, out + i, &dat->ks);
17f121de 2420
0f113f3e 2421 return 1;
17f121de 2422}
deb2c1a1 2423
0f113f3e
MC
2424static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2425 const unsigned char *in, size_t len)
17f121de 2426{
6435f0f6 2427 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
8ca28da0 2428
6435f0f6 2429 int num = EVP_CIPHER_CTX_num(ctx);
0f113f3e 2430 CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
6435f0f6
RL
2431 EVP_CIPHER_CTX_iv_noconst(ctx), &num, dat->block);
2432 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e 2433 return 1;
17f121de 2434}
deb2c1a1 2435
0f113f3e
MC
2436static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2437 const unsigned char *in, size_t len)
17f121de 2438{
6435f0f6 2439 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
8ca28da0 2440
6435f0f6 2441 int num = EVP_CIPHER_CTX_num(ctx);
0f113f3e 2442 CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
6435f0f6
RL
2443 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2444 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2445 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e 2446 return 1;
17f121de
AP
2447}
2448
0f113f3e
MC
2449static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2450 const unsigned char *in, size_t len)
17f121de 2451{
6435f0f6 2452 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
8ca28da0 2453
6435f0f6 2454 int num = EVP_CIPHER_CTX_num(ctx);
0f113f3e 2455 CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
6435f0f6
RL
2456 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2457 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2458 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e 2459 return 1;
17f121de 2460}
8d1ebe0b 2461
0f113f3e
MC
2462static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2463 const unsigned char *in, size_t len)
17f121de 2464{
6435f0f6 2465 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
0f113f3e 2466
6435f0f6
RL
2467 if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
2468 int num = EVP_CIPHER_CTX_num(ctx);
0f113f3e 2469 CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
6435f0f6
RL
2470 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2471 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2472 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e
MC
2473 return 1;
2474 }
2475
2476 while (len >= MAXBITCHUNK) {
6435f0f6 2477 int num = EVP_CIPHER_CTX_num(ctx);
0f113f3e 2478 CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
6435f0f6
RL
2479 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2480 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2481 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e 2482 len -= MAXBITCHUNK;
604e591e
BE
2483 out += MAXBITCHUNK;
2484 in += MAXBITCHUNK;
0f113f3e 2485 }
6435f0f6
RL
2486 if (len) {
2487 int num = EVP_CIPHER_CTX_num(ctx);
0f113f3e 2488 CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
6435f0f6
RL
2489 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2490 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2491 EVP_CIPHER_CTX_set_num(ctx, num);
2492 }
0f113f3e
MC
2493
2494 return 1;
17f121de 2495}
8d1ebe0b 2496
0f113f3e
MC
2497static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2498 const unsigned char *in, size_t len)
d976f992 2499{
6435f0f6
RL
2500 unsigned int num = EVP_CIPHER_CTX_num(ctx);
2501 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
0f113f3e
MC
2502
2503 if (dat->stream.ctr)
2504 CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
6435f0f6
RL
2505 EVP_CIPHER_CTX_iv_noconst(ctx),
2506 EVP_CIPHER_CTX_buf_noconst(ctx),
2507 &num, dat->stream.ctr);
0f113f3e
MC
2508 else
2509 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
6435f0f6
RL
2510 EVP_CIPHER_CTX_iv_noconst(ctx),
2511 EVP_CIPHER_CTX_buf_noconst(ctx), &num,
2512 dat->block);
2513 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e 2514 return 1;
d976f992
AP
2515}
2516
0f113f3e
MC
/* Instantiate all generic AES modes for 128-, 192- and 256-bit keys. */
BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
    BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
    BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
bdaa5415
DSH
2520
2521static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
0f113f3e 2522{
6435f0f6 2523 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
273a0218
BE
2524 if (gctx == NULL)
2525 return 0;
0f113f3e 2526 OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
6435f0f6 2527 if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
0f113f3e
MC
2528 OPENSSL_free(gctx->iv);
2529 return 1;
2530}
bdaa5415
DSH
2531
/*
 * Control handler for the software AES-GCM cipher.  Returns 1 (or a
 * positive pad length for EVP_CTRL_AEAD_TLS1_AAD) on success, 0 on failure
 * and -1 for unknown control types.
 */
static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
    switch (type) {
    case EVP_CTRL_INIT:
        /* Start with the cipher's default IV length and the ctx IV buffer. */
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = c->cipher->iv_len;
        gctx->iv = c->iv;
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        if (arg <= 0)
            return 0;
        /* Allocate memory for IV if needed */
        if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
            if (gctx->iv != c->iv)
                OPENSSL_free(gctx->iv);
            if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
                EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                return 0;
            }
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        /* An expected tag may only be supplied when decrypting. */
        if (arg <= 0 || arg > 16 || c->encrypt)
            return 0;
        memcpy(c->buf, ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        /* Tag can only be read back after an encrypt has produced one. */
        if (arg <= 0 || arg > 16 || !c->encrypt
            || gctx->taglen < 0)
            return 0;
        memcpy(ptr, c->buf, arg);
        return 1;

    case EVP_CTRL_GET_IV:
        /* Only valid when this code generated the IV (TLS or random). */
        if (gctx->iv_gen != 1 && gctx->iv_gen_rand != 1)
            return 0;
        if (gctx->ivlen != arg)
            return 0;
        memcpy(ptr, gctx->iv, arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole IV */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;
        if (arg)
            memcpy(gctx->iv, ptr, arg);
        /* On encrypt, fill the invocation field with random bytes. */
        if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;
        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        /* Produce the next IV (TLS explicit part copied out via ptr). */
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        /* Decrypt-only: install the received explicit IV part. */
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
            return 0;
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(c->buf, ptr, arg);
        gctx->tls_aad_len = arg;
        gctx->tls_enc_records = 0;
        {
            /* Last two AAD bytes are the record length (big-endian). */
            unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!c->encrypt) {
                if (len < EVP_GCM_TLS_TAG_LEN)
                    return 0;
                len -= EVP_GCM_TLS_TAG_LEN;
            }
            c->buf[arg - 2] = len >> 8;
            c->buf[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        /* Fix up internal pointers so the copy does not alias this ctx. */
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out);
            if (gctx->gcm.key) {
                if (gctx->gcm.key != &gctx->ks)
                    return 0;
                gctx_out->gcm.key = &gctx_out->ks;
            }
            if (gctx->iv == c->iv)
                gctx_out->iv = out->iv;
            else {
                /* IV was heap-allocated; give the copy its own buffer. */
                if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
                    EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                    return 0;
                }
                memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
            }
            return 1;
        }

    default:
        return -1;

    }
}
bdaa5415
DSH
2677
/*
 * Key/IV setup for software AES-GCM.  When a key is supplied the best
 * available AES implementation (HWAES > BSAES > VPAES > portable) is chosen
 * for the GCM block/CTR functions; an IV may be installed in the same call
 * or later.  Calling with neither key nor IV is a no-op.  Returns 1.
 */
static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
#ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) HWAES_encrypt);
# ifdef HWAES_ctr32_encrypt_blocks
                gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
# else
                gctx->ctr = NULL;
# endif
                break;
            } else
#endif
#ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE) {
                AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) AES_encrypt);
                gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
                break;
            } else
#endif
#ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) vpaes_encrypt);
                gctx->ctr = NULL;
                break;
            } else
#endif
                (void)0;        /* terminate potentially open 'else' */

            /* Portable fallback. */
            AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
            CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                               (block128_f) AES_encrypt);
#ifdef AES_CTR_ASM
            gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
#else
            gctx->ctr = NULL;
#endif
        } while (0);

        /*
         * If we have an iv can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}
2750
2751/*
2752 * Handle TLS GCM packet format. This consists of the last portion of the IV
28dd49fa
DSH
2753 * followed by the payload and finally the tag. On encrypt generate IV,
2754 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload
2755 * and verify tag.
2756 */
2757
/*
 * One-shot TLS record processing for AES-GCM.  The buffer layout is
 * [explicit IV | payload | tag] and processing is in place.  On encrypt the
 * explicit IV is generated into the output and the tag appended; on decrypt
 * the received IV is consumed and the tag verified (output is wiped on
 * mismatch).  Returns the number of bytes written, or -1 on any failure.
 */
static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    int rv = -1;
    /* Encrypt/decrypt must be performed in place */
    if (out != in
        || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;

    /*
     * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
     * Requirements from SP 800-38D". The requirements is for one party to the
     * communication to fail after 2^64 - 1 keys. We do this on the encrypting
     * side only.
     */
    if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
        EVPerr(EVP_F_AES_GCM_TLS_CIPHER, EVP_R_TOO_MANY_RECORDS);
        goto err;
    }

    /*
     * Set IV from start of buffer or generate IV and write to start of
     * buffer.
     */
    if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ? EVP_CTRL_GCM_IV_GEN
                                              : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;
    /* Use saved AAD */
    if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
        goto err;
    /* Fix buffer and length to point to payload */
    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    if (ctx->encrypt) {
        /* Encrypt payload */
        if (gctx->ctr) {
            size_t bulk = 0;
#if defined(AES_GCM_ASM)
            /* Hand the bulk of the data to the assembler fast path. */
            if (len >= 32 && AES_GCM_ASM(gctx)) {
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_encrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                            in + bulk,
                                            out + bulk,
                                            len - bulk, gctx->ctr))
                goto err;
        } else {
            size_t bulk = 0;
#if defined(AES_GCM_ASM2)
            if (len >= 32 && AES_GCM_ASM2(gctx)) {
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_encrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_encrypt(&gctx->gcm,
                                      in + bulk, out + bulk, len - bulk))
                goto err;
        }
        out += len;
        /* Finally write tag */
        CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        /* Decrypt */
        if (gctx->ctr) {
            size_t bulk = 0;
#if defined(AES_GCM_ASM)
            if (len >= 16 && AES_GCM_ASM(gctx)) {
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_decrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                            in + bulk,
                                            out + bulk,
                                            len - bulk, gctx->ctr))
                goto err;
        } else {
            size_t bulk = 0;
#if defined(AES_GCM_ASM2)
            if (len >= 16 && AES_GCM_ASM2(gctx)) {
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_decrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_decrypt(&gctx->gcm,
                                      in + bulk, out + bulk, len - bulk))
                goto err;
        }
        /* Retrieve tag */
        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
        /* If tag mismatch wipe buffer */
        if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }

 err:
    /* The IV must never be reused; force a fresh one for the next record. */
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}
28dd49fa 2887
bcf082d1
SL
2888#ifdef FIPS_MODE
2889/*
2890 * See SP800-38D (GCM) Section 8 "Uniqueness requirement on IVS and keys"
2891 *
2892 * See also 8.2.2 RBG-based construction.
2893 * Random construction consists of a free field (which can be NULL) and a
2894 * random field which will use a DRBG that can return at least 96 bits of
2895 * entropy strength. (The DRBG must be seeded by the FIPS module).
2896 */
2897static int aes_gcm_iv_generate(EVP_AES_GCM_CTX *gctx, int offset)
2898{
2899 int sz = gctx->ivlen - offset;
2900
2901 /* Must be at least 96 bits */
2902 if (sz <= 0 || gctx->ivlen < 12)
2903 return 0;
2904
2905 /* Use DRBG to generate random iv */
2906 if (RAND_bytes(gctx->iv + offset, sz) <= 0)
2907 return 0;
2908 return 1;
2909}
2910#endif /* FIPS_MODE */
2911
/*
 * Main AES-GCM update/final entry point.  With 'in' non-NULL it absorbs AAD
 * (out == NULL) or encrypts/decrypts payload, returning the byte count; with
 * 'in' NULL it finalises: verifying the expected tag on decrypt or producing
 * the tag on encrypt.  Returns -1 on error, 0 on successful finalisation.
 */
static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);

    /* If not set up, return error */
    if (!gctx->key_set)
        return -1;

    /* TLS record mode has its own combined one-shot path. */
    if (gctx->tls_aad_len >= 0)
        return aes_gcm_tls_cipher(ctx, out, in, len);

#ifdef FIPS_MODE
    /*
     * FIPS requires generation of AES-GCM IV's inside the FIPS module.
     * The IV can still be set externally (the security policy will state that
     * this is not FIPS compliant). There are some applications
     * where setting the IV externally is the only option available.
     */
    if (!gctx->iv_set) {
        if (!ctx->encrypt || !aes_gcm_iv_generate(gctx, 0))
            return -1;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen_rand = 1;
    }
#else
    if (!gctx->iv_set)
        return -1;
#endif                          /* FIPS_MODE */

    if (in) {
        if (out == NULL) {
            /* No output buffer: the input is additional authenticated data. */
            if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
                return -1;
        } else if (ctx->encrypt) {
            if (gctx->ctr) {
                size_t bulk = 0;
#if defined(AES_GCM_ASM)
                /* Align to a block boundary, then use the assembler path. */
                if (len >= 32 && AES_GCM_ASM(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_encrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key, gctx->gcm.Yi.c,
                                           gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                                in + bulk,
                                                out + bulk,
                                                len - bulk, gctx->ctr))
                    return -1;
            } else {
                size_t bulk = 0;
#if defined(AES_GCM_ASM2)
                if (len >= 32 && AES_GCM_ASM2(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_encrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key, gctx->gcm.Yi.c,
                                           gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_encrypt(&gctx->gcm,
                                          in + bulk, out + bulk, len - bulk))
                    return -1;
            }
        } else {
            if (gctx->ctr) {
                size_t bulk = 0;
#if defined(AES_GCM_ASM)
                if (len >= 16 && AES_GCM_ASM(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_decrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key,
                                           gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                                in + bulk,
                                                out + bulk,
                                                len - bulk, gctx->ctr))
                    return -1;
            } else {
                size_t bulk = 0;
#if defined(AES_GCM_ASM2)
                if (len >= 16 && AES_GCM_ASM2(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_decrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key,
                                           gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_decrypt(&gctx->gcm,
                                          in + bulk, out + bulk, len - bulk))
                    return -1;
            }
        }
        return len;
    } else {
        if (!ctx->encrypt) {
            /* Finalise decrypt: compare against the tag set via ctrl. */
            if (gctx->taglen < 0)
                return -1;
            if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
                return -1;
            gctx->iv_set = 0;
            return 0;
        }
        /* Finalise encrypt: store the full 16-byte tag for retrieval. */
        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
        gctx->taglen = 16;
        /* Don't reuse the IV */
        gctx->iv_set = 0;
        return 0;
    }

}
3054
/* Flags common to all "custom" AES ciphers (own IV, ctrl-init, copy hook). */
#define CUSTOM_FLAGS    (EVP_CIPH_FLAG_DEFAULT_ASN1 \
                | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                | EVP_CIPH_CUSTOM_COPY)

/* AES-GCM with the default 12-byte (96-bit) IV, one per key size. */
BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
    BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
                        EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
    BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
                        EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
32a2d8dd
DSH
3066
3067static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
0f113f3e 3068{
2c840201
P
3069 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, c);
3070
0f113f3e
MC
3071 if (type == EVP_CTRL_COPY) {
3072 EVP_CIPHER_CTX *out = ptr;
6435f0f6 3073 EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out);
2c840201 3074
0f113f3e
MC
3075 if (xctx->xts.key1) {
3076 if (xctx->xts.key1 != &xctx->ks1)
3077 return 0;
3078 xctx_out->xts.key1 = &xctx_out->ks1;
3079 }
3080 if (xctx->xts.key2) {
3081 if (xctx->xts.key2 != &xctx->ks2)
3082 return 0;
3083 xctx_out->xts.key2 = &xctx_out->ks2;
3084 }
3085 return 1;
3086 } else if (type != EVP_CTRL_INIT)
3087 return -1;
3088 /* key1 and key2 are used as an indicator both key and IV are set */
3089 xctx->xts.key1 = NULL;
3090 xctx->xts.key2 = NULL;
3091 return 1;
3092}
32a2d8dd
DSH
3093
/*
 * Initialise an AES-XTS context. The supplied key is two half-length AES
 * keys back to back (data key followed by tweak key). Selects an
 * accelerated implementation (HWAES / BSAES / VPAES) when available,
 * otherwise falls back to the generic AES routines. Returns 1 on success,
 * 0 on failure (duplicated half-keys).
 */
static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);

    /* Nothing to do if neither key nor IV supplied */
    if (!iv && !key)
        return 1;

    if (key) {
        do {
            /* The key is two half length keys in reality */
            const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
            const int bits = bytes * 8;

            /*
             * Verify that the two keys are different.
             *
             * This addresses the vulnerability described in Rogaway's
             * September 2004 paper:
             *
             * "Efficient Instantiations of Tweakable Blockciphers and
             * Refinements to Modes OCB and PMAC".
             * (http://web.cs.ucdavis.edu/~rogaway/papers/offsets.pdf)
             *
             * FIPS 140-2 IG A.9 XTS-AES Key Generation Requirements states
             * that:
             * "The check for Key_1 != Key_2 shall be done at any place
             * BEFORE using the keys in the XTS-AES algorithm to process
             * data with them."
             */
            /* NOTE(review): allow_insecure_decrypt is defined elsewhere in
             * this file — presumably a file-scope opt-out for decryption
             * only; confirm against the full source. */
            if ((!allow_insecure_decrypt || enc)
                    && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
                EVPerr(EVP_F_AES_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
                return 0;
            }

#ifdef AES_XTS_ASM
            xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
#else
            xctx->stream = NULL;
#endif
            /* key_len is two AES keys */
#ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                if (enc) {
                    HWAES_set_encrypt_key(key, bits, &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) HWAES_encrypt;
# ifdef HWAES_xts_encrypt
                    xctx->stream = HWAES_xts_encrypt;
# endif
                } else {
                    HWAES_set_decrypt_key(key, bits, &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) HWAES_decrypt;
# ifdef HWAES_xts_decrypt
                    xctx->stream = HWAES_xts_decrypt;
# endif
                }

                /* The tweak key always uses the encrypt direction */
                HWAES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
                xctx->xts.block2 = (block128_f) HWAES_encrypt;

                xctx->xts.key1 = &xctx->ks1;
                break;
            } else
#endif
#ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE)
                xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
            else
#endif
#ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                if (enc) {
                    vpaes_set_encrypt_key(key, bits, &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) vpaes_encrypt;
                } else {
                    vpaes_set_decrypt_key(key, bits, &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) vpaes_decrypt;
                }

                vpaes_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
                xctx->xts.block2 = (block128_f) vpaes_encrypt;

                xctx->xts.key1 = &xctx->ks1;
                break;
            } else
#endif
                (void)0;        /* terminate potentially open 'else' */

            /* Generic software fallback */
            if (enc) {
                AES_set_encrypt_key(key, bits, &xctx->ks1.ks);
                xctx->xts.block1 = (block128_f) AES_encrypt;
            } else {
                AES_set_decrypt_key(key, bits, &xctx->ks1.ks);
                xctx->xts.block1 = (block128_f) AES_decrypt;
            }

            AES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
            xctx->xts.block2 = (block128_f) AES_encrypt;

            xctx->xts.key1 = &xctx->ks1;
        } while (0);
    }

    if (iv) {
        /* key2 set signals that the IV (tweak) has been provided */
        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
    }

    return 1;
}
32a2d8dd 3205
17f121de 3206static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e
MC
3207 const unsigned char *in, size_t len)
3208{
6435f0f6 3209 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
95eda4f0
P
3210
3211 if (xctx->xts.key1 == NULL
3212 || xctx->xts.key2 == NULL
3213 || out == NULL
3214 || in == NULL
3215 || len < AES_BLOCK_SIZE)
0f113f3e 3216 return 0;
95eda4f0 3217
5516c19b
P
3218 /*
3219 * Impose a limit of 2^20 blocks per data unit as specifed by
3220 * IEEE Std 1619-2018. The earlier and obsolete IEEE Std 1619-2007
3221 * indicated that this was a SHOULD NOT rather than a MUST NOT.
3222 * NIST SP 800-38E mandates the same limit.
3223 */
3224 if (len > XTS_MAX_BLOCKS_PER_DATA_UNIT * AES_BLOCK_SIZE) {
3225 EVPerr(EVP_F_AES_XTS_CIPHER, EVP_R_XTS_DATA_UNIT_IS_TOO_LARGE);
3226 return 0;
3227 }
3228
0f113f3e
MC
3229 if (xctx->stream)
3230 (*xctx->stream) (in, out, len,
6435f0f6
RL
3231 xctx->xts.key1, xctx->xts.key2,
3232 EVP_CIPHER_CTX_iv_noconst(ctx));
3233 else if (CRYPTO_xts128_encrypt(&xctx->xts, EVP_CIPHER_CTX_iv_noconst(ctx),
3234 in, out, len,
3235 EVP_CIPHER_CTX_encrypting(ctx)))
0f113f3e
MC
3236 return 0;
3237 return 1;
3238}
3239
/* XTS contexts hold no heap resources, so no cleanup handler is needed */
#define aes_xts_cleanup NULL

#define XTS_FLAGS       (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                | EVP_CIPH_CUSTOM_COPY)

/* AES-XTS definitions: only 128 and 256-bit variants exist (key is doubled) */
BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
    BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
23916810
DSH
3248
/*
 * Control operations for AES-CCM contexts: init/reset, TLS AAD handling,
 * fixed-IV setup, L (length field size) / IV length, tag set/get and
 * context copy. Returns 1 (or a positive pad amount for TLS1_AAD) on
 * success, 0 on failure, -1 for unsupported operations.
 */
static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c);
    switch (type) {
    case EVP_CTRL_INIT:
        /* Reset to the CCM defaults: L=8 (8-byte length field), M=12 tag */
        cctx->key_set = 0;
        cctx->iv_set = 0;
        cctx->L = 8;
        cctx->M = 12;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        cctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        cctx->tls_aad_len = arg;
        {
            /* Plaintext length lives in the last two AAD bytes (big endian) */
            uint16_t len =
                EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!EVP_CIPHER_CTX_encrypting(c)) {
                if (len < cctx->M)
                    return 0;
                len -= cctx->M;
            }
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return cctx->M;

    case EVP_CTRL_CCM_SET_IV_FIXED:
        /* Sanity check length */
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
            return 0;
        /* Just copy to first part of IV */
        memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        /* CCM nonce length and L are linked: nonce len = 15 - L */
        arg = 15 - arg;
        /* fall thru */
    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
            return 0;
        cctx->L = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        /* Tag length must be an even value between 4 and 16 */
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;
        /* An explicit tag value may only be supplied when decrypting */
        if (EVP_CIPHER_CTX_encrypting(c) && ptr)
            return 0;
        if (ptr) {
            cctx->tag_set = 1;
            memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        }
        cctx->M = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        /* Tag retrieval is only valid after encryption has finished */
        if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
            return 0;
        if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
            return 0;
        /* Reading the tag invalidates the IV/length state for reuse */
        cctx->tag_set = 0;
        cctx->iv_set = 0;
        cctx->len_set = 0;
        return 1;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out);
            if (cctx->ccm.key) {
                /* Only contexts pointing at their own key schedule copy */
                if (cctx->ccm.key != &cctx->ks)
                    return 0;
                cctx_out->ccm.key = &cctx_out->ks;
            }
            return 1;
        }

    default:
        return -1;

    }
}
23916810
DSH
3345
/*
 * Initialise an AES-CCM context with a key and/or nonce. Picks a hardware
 * or vector-permutation AES implementation when available, otherwise the
 * generic one. Always returns 1.
 */
static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    /* Nothing to do if neither key nor IV supplied */
    if (!iv && !key)
        return 1;
    if (key)
        do {
#ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &cctx->ks.ks);

                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                                   &cctx->ks, (block128_f) HWAES_encrypt);
                cctx->str = NULL;
                cctx->key_set = 1;
                break;
            } else
#endif
#ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &cctx->ks.ks);
                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                                   &cctx->ks, (block128_f) vpaes_encrypt);
                cctx->str = NULL;
                cctx->key_set = 1;
                break;
            }
#endif
            /* Generic software fallback; CCM only needs the encrypt schedule */
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &cctx->ks.ks);
            CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                               &cctx->ks, (block128_f) AES_encrypt);
            cctx->str = NULL;
            cctx->key_set = 1;
        } while (0);
    if (iv) {
        /* Nonce length is 15 - L by the CCM construction */
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}
23916810 3390
e75c5a79
DSH
/*
 * One-shot CCM processing of a whole TLS record (in place). Uses the AAD
 * stashed by EVP_CTRL_AEAD_TLS1_AAD. Returns the number of output bytes on
 * success, -1 on failure; on decryption failure the payload is cleansed.
 */
static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;
    /* Encrypt/decrypt must be performed in place */
    if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
        return -1;
    /* If encrypting set explicit IV from sequence number (start of AAD) */
    if (EVP_CIPHER_CTX_encrypting(ctx))
        memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
               EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Get rest of IV from explicit IV */
    memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in,
           EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Correct length value */
    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L,
                            len))
        return -1;
    /* Use saved AAD */
    CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
    /* Fix buffer to point to payload */
    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        /* cctx->str, when set, is an accelerated ccm64 stream function */
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
                                                    cctx->str) :
            CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
            return -1;
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    } else {
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            /* Constant-time compare of computed tag vs received tag */
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, in + len, cctx->M))
                    return len;
            }
        }
        /* Authentication failed: do not leak the bogus plaintext */
        OPENSSL_cleanse(out, len);
        return -1;
    }
}
3438
17f121de 3439static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e
MC
3440 const unsigned char *in, size_t len)
3441{
6435f0f6 3442 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
0f113f3e
MC
3443 CCM128_CONTEXT *ccm = &cctx->ccm;
3444 /* If not set up, return error */
e75c5a79
DSH
3445 if (!cctx->key_set)
3446 return -1;
3447
3448 if (cctx->tls_aad_len >= 0)
3449 return aes_ccm_tls_cipher(ctx, out, in, len);
3450
197421b1
DSH
3451 /* EVP_*Final() doesn't return any data */
3452 if (in == NULL && out != NULL)
3453 return 0;
3454
e75c5a79 3455 if (!cctx->iv_set)
0f113f3e 3456 return -1;
e75c5a79 3457
0f113f3e
MC
3458 if (!out) {
3459 if (!in) {
6435f0f6
RL
3460 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
3461 15 - cctx->L, len))
0f113f3e
MC
3462 return -1;
3463 cctx->len_set = 1;
3464 return len;
3465 }
3466 /* If have AAD need message length */
3467 if (!cctx->len_set && len)
3468 return -1;
3469 CRYPTO_ccm128_aad(ccm, in, len);
3470 return len;
3471 }
67c81ec3
TN
3472
3473 /* The tag must be set before actually decrypting data */
3474 if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
3475 return -1;
3476
0f113f3e
MC
3477 /* If not set length yet do it */
3478 if (!cctx->len_set) {
6435f0f6
RL
3479 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
3480 15 - cctx->L, len))
0f113f3e
MC
3481 return -1;
3482 cctx->len_set = 1;
3483 }
6435f0f6 3484 if (EVP_CIPHER_CTX_encrypting(ctx)) {
0f113f3e
MC
3485 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3486 cctx->str) :
3487 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3488 return -1;
3489 cctx->tag_set = 1;
3490 return len;
3491 } else {
3492 int rv = -1;
3493 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3494 cctx->str) :
3495 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3496 unsigned char tag[16];
3497 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
6435f0f6
RL
3498 if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
3499 cctx->M))
0f113f3e
MC
3500 rv = len;
3501 }
3502 }
3503 if (rv == -1)
3504 OPENSSL_cleanse(out, len);
3505 cctx->iv_set = 0;
3506 cctx->tag_set = 0;
3507 cctx->len_set = 0;
3508 return rv;
3509 }
0f113f3e
MC
3510}
3511
/* CCM contexts hold no heap resources, so no cleanup handler is needed */
#define aes_ccm_cleanup NULL

/* AES-CCM EVP_CIPHER definitions for 128/192/256-bit keys (12-byte default IV) */
BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
    BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
                        EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
    BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
                        EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
0f113f3e
MC
3520
/* Per-context state for the RFC 3394 / RFC 5649 AES key wrap modes */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        AES_KEY ks;             /* AES key schedule */
    } ks;
    /* Indicates if IV has been set (NULL means "use the default ICV") */
    unsigned char *iv;
} EVP_AES_WRAP_CTX;
97cf1f6c
DSH
3529
3530static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
3531 const unsigned char *iv, int enc)
3532{
6435f0f6 3533 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
0f113f3e
MC
3534 if (!iv && !key)
3535 return 1;
3536 if (key) {
6435f0f6
RL
3537 if (EVP_CIPHER_CTX_encrypting(ctx))
3538 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3539 &wctx->ks.ks);
0f113f3e 3540 else
6435f0f6
RL
3541 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3542 &wctx->ks.ks);
0f113f3e
MC
3543 if (!iv)
3544 wctx->iv = NULL;
3545 }
3546 if (iv) {
6435f0f6
RL
3547 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, EVP_CIPHER_CTX_iv_length(ctx));
3548 wctx->iv = EVP_CIPHER_CTX_iv_noconst(ctx);
0f113f3e
MC
3549 }
3550 return 1;
3551}
97cf1f6c
DSH
3552
/*
 * AES key wrap/unwrap entry point (RFC 3394, and RFC 5649 when padding).
 * With out == NULL it only reports the required output size. Returns the
 * output length, 0 for no-op/overlap, or -1 on error.
 */
static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t inlen)
{
    EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
    size_t rv;
    /* AES wrap with padding has IV length of 4, without padding 8 */
    int pad = EVP_CIPHER_CTX_iv_length(ctx) == 4;
    /* No final operation so always return zero length */
    if (!in)
        return 0;
    /* Input length must always be non-zero */
    if (!inlen)
        return -1;
    /* If decrypting need at least 16 bytes and multiple of 8 */
    if (!EVP_CIPHER_CTX_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
        return -1;
    /* If not padding input must be multiple of 8 */
    if (!pad && inlen & 0x7)
        return -1;
    if (is_partially_overlapping(out, in, inlen)) {
        EVPerr(EVP_F_AES_WRAP_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
        return 0;
    }
    if (!out) {
        /* Size query only: no data is processed */
        if (EVP_CIPHER_CTX_encrypting(ctx)) {
            /* If padding round up to multiple of 8 */
            if (pad)
                inlen = (inlen + 7) / 8 * 8;
            /* 8 byte prefix */
            return inlen + 8;
        } else {
            /*
             * If not padding output will be exactly 8 bytes smaller than
             * input. If padding it will be at least 8 bytes smaller but we
             * don't know how much.
             */
            return inlen - 8;
        }
    }
    if (pad) {
        if (EVP_CIPHER_CTX_encrypting(ctx))
            rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
                                     out, in, inlen,
                                     (block128_f) AES_encrypt);
        else
            rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
                                       out, in, inlen,
                                       (block128_f) AES_decrypt);
    } else {
        if (EVP_CIPHER_CTX_encrypting(ctx))
            rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
                                 out, in, inlen, (block128_f) AES_encrypt);
        else
            rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
                                   out, in, inlen, (block128_f) AES_decrypt);
    }
    /* The low-level wrap routines return 0 on failure */
    return rv ? (int)rv : -1;
}
3611
/*
 * Flags for the key wrap modes: wrap mode semantics, custom IV and cipher
 * entry point, init always called, default ASN.1 handling.
 */
#define WRAP_FLAGS      (EVP_CIPH_WRAP_MODE \
                | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)

/* AES-128 key wrap (RFC 3394): 8-byte block, 16-byte key, 8-byte ICV */
static const EVP_CIPHER aes_128_wrap = {
    NID_id_aes128_wrap,
    8, 16, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_128_wrap(void)
{
    return &aes_128_wrap;
}

/* AES-192 key wrap (RFC 3394): 8-byte block, 24-byte key, 8-byte ICV */
static const EVP_CIPHER aes_192_wrap = {
    NID_id_aes192_wrap,
    8, 24, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_192_wrap(void)
{
    return &aes_192_wrap;
}

/* AES-256 key wrap (RFC 3394): 8-byte block, 32-byte key, 8-byte ICV */
static const EVP_CIPHER aes_256_wrap = {
    NID_id_aes256_wrap,
    8, 32, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_256_wrap(void)
{
    return &aes_256_wrap;
}
97cf1f6c 3657
d31fed73 3658static const EVP_CIPHER aes_128_wrap_pad = {
0f113f3e
MC
3659 NID_id_aes128_wrap_pad,
3660 8, 16, 4, WRAP_FLAGS,
3661 aes_wrap_init_key, aes_wrap_cipher,
3662 NULL,
3663 sizeof(EVP_AES_WRAP_CTX),
3664 NULL, NULL, NULL, NULL
3665};
d31fed73
DSH
3666
3667const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
0f113f3e
MC
3668{
3669 return &aes_128_wrap_pad;
3670}
d31fed73
DSH
3671
3672static const EVP_CIPHER aes_192_wrap_pad = {
0f113f3e
MC
3673 NID_id_aes192_wrap_pad,
3674 8, 24, 4, WRAP_FLAGS,
3675 aes_wrap_init_key, aes_wrap_cipher,
3676 NULL,
3677 sizeof(EVP_AES_WRAP_CTX),
3678 NULL, NULL, NULL, NULL
3679};
d31fed73
DSH
3680
3681const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
0f113f3e
MC
3682{
3683 return &aes_192_wrap_pad;
3684}
d31fed73
DSH
3685
3686static const EVP_CIPHER aes_256_wrap_pad = {
0f113f3e
MC
3687 NID_id_aes256_wrap_pad,
3688 8, 32, 4, WRAP_FLAGS,
3689 aes_wrap_init_key, aes_wrap_cipher,
3690 NULL,
3691 sizeof(EVP_AES_WRAP_CTX),
3692 NULL, NULL, NULL, NULL
3693};
d31fed73
DSH
3694
3695const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
0f113f3e
MC
3696{
3697 return &aes_256_wrap_pad;
3698}
d31fed73 3699
#ifndef OPENSSL_NO_OCB
/*
 * Control operations for AES-OCB contexts: init/reset, IV length, tag
 * set/get and context copy. Returns 1 on success, 0 on failure, -1 for
 * unsupported operations.
 */
static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
    EVP_CIPHER_CTX *newc;
    EVP_AES_OCB_CTX *new_octx;

    switch (type) {
    case EVP_CTRL_INIT:
        /* Reset buffered state; default tag length is 16 bytes */
        octx->key_set = 0;
        octx->iv_set = 0;
        octx->ivlen = EVP_CIPHER_CTX_iv_length(c);
        octx->iv = EVP_CIPHER_CTX_iv_noconst(c);
        octx->taglen = 16;
        octx->data_buf_len = 0;
        octx->aad_buf_len = 0;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        /* IV len must be 1 to 15 */
        if (arg <= 0 || arg > 15)
            return 0;

        octx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if (!ptr) {
            /* NULL ptr means: only set the expected tag length */
            /* Tag len must be 0 to 16 */
            if (arg < 0 || arg > 16)
                return 0;

            octx->taglen = arg;
            return 1;
        }
        /* A concrete tag value may only be supplied when decrypting */
        if (arg != octx->taglen || EVP_CIPHER_CTX_encrypting(c))
            return 0;
        memcpy(octx->tag, ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        /* Tag retrieval is only valid when encrypting */
        if (arg != octx->taglen || !EVP_CIPHER_CTX_encrypting(c))
            return 0;

        memcpy(ptr, octx->tag, arg);
        return 1;

    case EVP_CTRL_COPY:
        newc = (EVP_CIPHER_CTX *)ptr;
        new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc);
        return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
                                      &new_octx->ksenc.ks,
                                      &new_octx->ksdec.ks);

    default:
        return -1;

    }
}
e6b336ef
MC
3759
/*
 * Initialise an AES-OCB context with a key and/or IV. OCB needs both the
 * encrypt and decrypt key schedules. Returns 1 on success, 0 on failure.
 */
static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
    /* Nothing to do if neither key nor IV supplied */
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
# ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksenc.ks);
                HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) HWAES_encrypt,
                                        (block128_f) HWAES_decrypt,
                                        enc ? HWAES_ocb_encrypt
                                            : HWAES_ocb_decrypt))
                    return 0;
                break;
            }
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksenc.ks);
                vpaes_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) vpaes_encrypt,
                                        (block128_f) vpaes_decrypt,
                                        NULL))
                    return 0;
                break;
            }
# endif
            /* Generic software fallback */
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &octx->ksenc.ks);
            AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) AES_encrypt,
                                    (block128_f) AES_decrypt,
                                    NULL))
                return 0;
        }
        while (0);

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}
e6b336ef
MC
3839
/*
 * EVP cipher entry point for AES-OCB. Buffers partial blocks of both data
 * and AAD (out == NULL signals AAD) so that the low-level OCB routines only
 * ever see whole blocks; in == NULL is the Final call, which flushes the
 * buffers and produces/verifies the tag. Returns the number of bytes
 * written, 0 when data was only buffered, or -1 on error.
 */
static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    unsigned char *buf;
    int *buf_len;
    int written_len = 0;
    size_t trailing_len;
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

    /* If IV or Key not set then return error */
    if (!octx->iv_set)
        return -1;

    if (!octx->key_set)
        return -1;

    if (in != NULL) {
        /*
         * Need to ensure we are only passing full blocks to low level OCB
         * routines. We do it here rather than in EVP_EncryptUpdate/
         * EVP_DecryptUpdate because we need to pass full blocks of AAD too
         * and those routines don't support that
         */

        /* Are we dealing with AAD or normal data here? */
        if (out == NULL) {
            buf = octx->aad_buf;
            buf_len = &(octx->aad_buf_len);
        } else {
            buf = octx->data_buf;
            buf_len = &(octx->data_buf_len);

            if (is_partially_overlapping(out + *buf_len, in, len)) {
                EVPerr(EVP_F_AES_OCB_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
                return 0;
            }
        }

        /*
         * If we've got a partially filled buffer from a previous call then
         * use that data first
         */
        if (*buf_len > 0) {
            unsigned int remaining;

            remaining = AES_BLOCK_SIZE - (*buf_len);
            if (remaining > len) {
                /* Still not a whole block: just keep buffering */
                memcpy(buf + (*buf_len), in, len);
                *(buf_len) += len;
                return 0;
            }
            memcpy(buf + (*buf_len), in, remaining);

            /*
             * If we get here we've filled the buffer, so process it
             */
            len -= remaining;
            in += remaining;
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
                    return -1;
            } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            }
            written_len = AES_BLOCK_SIZE;
            *buf_len = 0;
            if (out != NULL)
                out += AES_BLOCK_SIZE;
        }

        /* Do we have a partial block to handle at the end? */
        trailing_len = len % AES_BLOCK_SIZE;

        /*
         * If we've got some full blocks to handle, then process these first
         */
        if (len != trailing_len) {
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
                    return -1;
            } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            }
            written_len += len - trailing_len;
            in += len - trailing_len;
        }

        /* Handle any trailing partial block */
        if (trailing_len > 0) {
            memcpy(buf, in, trailing_len);
            *buf_len = trailing_len;
        }

        return written_len;
    } else {
        /*
         * First of all empty the buffer of any partial block that we might
         * have been provided - both for data and AAD
         */
        if (octx->data_buf_len > 0) {
            if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            }
            written_len = octx->data_buf_len;
            octx->data_buf_len = 0;
        }
        if (octx->aad_buf_len > 0) {
            if (!CRYPTO_ocb128_aad
                (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
                return -1;
            octx->aad_buf_len = 0;
        }
        /* If decrypting then verify */
        if (!EVP_CIPHER_CTX_encrypting(ctx)) {
            if (octx->taglen < 0)
                return -1;
            if (CRYPTO_ocb128_finish(&octx->ocb,
                                     octx->tag, octx->taglen) != 0)
                return -1;
            octx->iv_set = 0;
            return written_len;
        }
        /* If encrypting then just get the tag */
        if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
            return -1;
        /* Don't reuse the IV */
        octx->iv_set = 0;
        return written_len;
    }
}
e6b336ef
MC
3988
/* Release the OCB layer's internal allocations when the EVP ctx is freed */
static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
    CRYPTO_ocb128_cleanup(&octx->ocb);
    return 1;
}
e6b336ef 3995
/* AES-OCB EVP_CIPHER definitions (16-byte block, 12-byte default IV) */
BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
#endif                          /* OPENSSL_NO_OCB */
b1ceb439
TS
4003
/* AES-SIV mode (RFC 5297): deterministic AEAD built on CMAC + CTR */
#ifndef OPENSSL_NO_SIV

/* SIV keeps all of its state in the shared SIV128_CONTEXT */
typedef SIV128_CONTEXT EVP_AES_SIV_CTX;
4009#define aesni_siv_init_key aes_siv_init_key
4010static int aes_siv_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
4011 const unsigned char *iv, int enc)
4012{
4013 const EVP_CIPHER *ctr;
4014 const EVP_CIPHER *cbc;
4015 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, ctx);
4016 int klen = EVP_CIPHER_CTX_key_length(ctx) / 2;
4017
4018 if (key == NULL)
4019 return 1;
4020
4021 switch (klen) {
4022 case 16:
4023 cbc = EVP_aes_128_cbc();
4024 ctr = EVP_aes_128_ctr();
4025 break;
4026 case 24:
4027 cbc = EVP_aes_192_cbc();
4028 ctr = EVP_aes_192_ctr();
4029 break;
4030 case 32:
4031 cbc = EVP_aes_256_cbc();
4032 ctr = EVP_aes_256_ctr();
4033 break;
4034 default:
4035 return 0;
4036 }
4037
4038 /* klen is the length of the underlying cipher, not the input key,
4039 which should be twice as long */
4040 return CRYPTO_siv128_init(sctx, key, klen, cbc, ctr);
4041}
4042
4043#define aesni_siv_cipher aes_siv_cipher
4044static int aes_siv_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
4045 const unsigned char *in, size_t len)
4046{
4047 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, ctx);
4048
4049 /* EncryptFinal or DecryptFinal */
4050 if (in == NULL)
4051 return CRYPTO_siv128_finish(sctx);
4052
4053 /* Deal with associated data */
4054 if (out == NULL)
4055 return CRYPTO_siv128_aad(sctx, in, len);
4056
4057 if (EVP_CIPHER_CTX_encrypting(ctx))
4058 return CRYPTO_siv128_encrypt(sctx, in, out, len);
4059
4060 return CRYPTO_siv128_decrypt(sctx, in, out, len);
4061}
4062
4063#define aesni_siv_cleanup aes_siv_cleanup
4064static int aes_siv_cleanup(EVP_CIPHER_CTX *c)
4065{
4066 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, c);
4067
4068 return CRYPTO_siv128_cleanup(sctx);
4069}
4070
4071
4072#define aesni_siv_ctrl aes_siv_ctrl
4073static int aes_siv_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
4074{
4075 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, c);
4076 SIV128_CONTEXT *sctx_out;
4077
4078 switch (type) {
4079 case EVP_CTRL_INIT:
4080 return CRYPTO_siv128_cleanup(sctx);
4081
4082 case EVP_CTRL_SET_SPEED:
4083 return CRYPTO_siv128_speed(sctx, arg);
4084
4085 case EVP_CTRL_AEAD_SET_TAG:
4086 if (!EVP_CIPHER_CTX_encrypting(c))
4087 return CRYPTO_siv128_set_tag(sctx, ptr, arg);
4088 return 1;
4089
4090 case EVP_CTRL_AEAD_GET_TAG:
4091 if (!EVP_CIPHER_CTX_encrypting(c))
4092 return 0;
4093 return CRYPTO_siv128_get_tag(sctx, ptr, arg);
4094
4095 case EVP_CTRL_COPY:
4096 sctx_out = EVP_C_DATA(SIV128_CONTEXT, (EVP_CIPHER_CTX*)ptr);
4097 return CRYPTO_siv128_copy_ctx(sctx_out, sctx);
4098
4099 default:
4100 return -1;
4101
4102 }
4103}
4104
/* SIV is an AEAD with custom IV/cipher/copy handling and ctrl-based init */
#define SIV_FLAGS    (EVP_CIPH_FLAG_AEAD_CIPHER | EVP_CIPH_FLAG_DEFAULT_ASN1 \
                      | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                      | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CUSTOM_COPY \
                      | EVP_CIPH_CTRL_INIT)

/* AES-SIV EVP_CIPHER definitions (stream-like: block size 1, no IV) */
BLOCK_CIPHER_custom(NID_aes, 128, 1, 0, siv, SIV, SIV_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 1, 0, siv, SIV, SIV_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 0, siv, SIV, SIV_FLAGS)
#endif