]> git.ipfire.org Git - thirdparty/openssl.git/blame - crypto/evp/e_aes.c
Update copyright year
[thirdparty/openssl.git] / crypto / evp / e_aes.c
CommitLineData
aa6bb135 1/*
3c2bdd7d 2 * Copyright 2001-2021 The OpenSSL Project Authors. All Rights Reserved.
deb2c1a1 3 *
4a8b0c55 4 * Licensed under the Apache License 2.0 (the "License"). You may not use
aa6bb135
RS
5 * this file except in compliance with the License. You can obtain a copy
6 * in the file LICENSE in the source distribution or at
7 * https://www.openssl.org/source/license.html
deb2c1a1
DSH
8 */
9
c72fa255
MC
10/*
11 * This file uses the low level AES functions (which are deprecated for
12 * non-internal use) in order to implement the EVP AES ciphers.
13 */
14#include "internal/deprecated.h"
15
743694a6
MC
16#include <string.h>
17#include <assert.h>
8c84b677 18#include <openssl/opensslconf.h>
5158c763
MC
19#include <openssl/crypto.h>
20#include <openssl/evp.h>
21#include <openssl/err.h>
5158c763 22#include <openssl/aes.h>
743694a6
MC
23#include <openssl/rand.h>
24#include <openssl/cmac.h>
25f2138b 25#include "crypto/evp.h"
39147079 26#include "internal/cryptlib.h"
25f2138b
DMSP
27#include "crypto/modes.h"
28#include "crypto/siv.h"
cc731bc3 29#include "crypto/aes_platform.h"
706457b7 30#include "evp_local.h"
0f113f3e
MC
31
32typedef struct {
33 union {
39147079 34 OSSL_UNION_ALIGN;
0f113f3e
MC
35 AES_KEY ks;
36 } ks;
37 block128_f block;
38 union {
39 cbc128_f cbc;
40 ctr128_f ctr;
41 } stream;
42} EVP_AES_KEY;
43
44typedef struct {
45 union {
39147079 46 OSSL_UNION_ALIGN;
0f113f3e
MC
47 AES_KEY ks;
48 } ks; /* AES key schedule to use */
49 int key_set; /* Set if key initialised */
50 int iv_set; /* Set if an iv is set */
51 GCM128_CONTEXT gcm;
52 unsigned char *iv; /* Temporary IV store */
53 int ivlen; /* IV length */
54 int taglen;
55 int iv_gen; /* It is OK to generate IVs */
bcf082d1 56 int iv_gen_rand; /* No IV was specified, so generate a rand IV */
0f113f3e 57 int tls_aad_len; /* TLS AAD length */
d6b34570 58 uint64_t tls_enc_records; /* Number of TLS records encrypted */
0f113f3e
MC
59 ctr128_f ctr;
60} EVP_AES_GCM_CTX;
61
62typedef struct {
63 union {
39147079 64 OSSL_UNION_ALIGN;
0f113f3e
MC
65 AES_KEY ks;
66 } ks1, ks2; /* AES key schedules to use */
67 XTS128_CONTEXT xts;
68 void (*stream) (const unsigned char *in,
69 unsigned char *out, size_t length,
70 const AES_KEY *key1, const AES_KEY *key2,
71 const unsigned char iv[16]);
72} EVP_AES_XTS_CTX;
73
f844f9eb 74#ifdef FIPS_MODULE
2c840201
P
75static const int allow_insecure_decrypt = 0;
76#else
77static const int allow_insecure_decrypt = 1;
78#endif
79
0f113f3e
MC
80typedef struct {
81 union {
39147079 82 OSSL_UNION_ALIGN;
0f113f3e
MC
83 AES_KEY ks;
84 } ks; /* AES key schedule to use */
85 int key_set; /* Set if key initialised */
86 int iv_set; /* Set if an iv is set */
87 int tag_set; /* Set if tag is valid */
88 int len_set; /* Set if message length set */
89 int L, M; /* L and M parameters from RFC3610 */
e75c5a79 90 int tls_aad_len; /* TLS AAD length */
0f113f3e
MC
91 CCM128_CONTEXT ccm;
92 ccm128_f str;
93} EVP_AES_CCM_CTX;
94
5158c763 95#ifndef OPENSSL_NO_OCB
0f113f3e 96typedef struct {
bdc985b1 97 union {
39147079 98 OSSL_UNION_ALIGN;
bdc985b1
AP
99 AES_KEY ks;
100 } ksenc; /* AES key schedule to use for encryption */
101 union {
39147079 102 OSSL_UNION_ALIGN;
bdc985b1
AP
103 AES_KEY ks;
104 } ksdec; /* AES key schedule to use for decryption */
0f113f3e
MC
105 int key_set; /* Set if key initialised */
106 int iv_set; /* Set if an iv is set */
107 OCB128_CONTEXT ocb;
108 unsigned char *iv; /* Temporary IV store */
109 unsigned char tag[16];
110 unsigned char data_buf[16]; /* Store partial data blocks */
111 unsigned char aad_buf[16]; /* Store partial AAD blocks */
112 int data_buf_len;
113 int aad_buf_len;
114 int ivlen; /* IV length */
115 int taglen;
116} EVP_AES_OCB_CTX;
5158c763 117#endif
e6b336ef 118
5158c763 119#define MAXBITCHUNK ((size_t)1<<(sizeof(size_t)*8-4))
17f121de 120
03a5e5ae
PS
121/* increment counter (64-bit int) by 1 */
122static void ctr64_inc(unsigned char *counter)
123{
124 int n = 8;
125 unsigned char c;
126
127 do {
128 --n;
129 c = counter[n];
130 ++c;
131 counter[n] = c;
132 if (c)
133 return;
134 } while (n);
135}
136
459b15d4 137#if defined(AESNI_CAPABLE)
5158c763 138# if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
5158c763 139# define AES_GCM_ASM2(gctx) (gctx->gcm.block==(block128_f)aesni_encrypt && \
0f113f3e 140 gctx->gcm.ghash==gcm_ghash_avx)
5158c763
MC
141# undef AES_GCM_ASM2 /* minor size optimization */
142# endif
4e049c52 143
17f121de 144static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
145 const unsigned char *iv, int enc)
146{
147 int ret, mode;
6435f0f6 148 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
0f113f3e 149
6435f0f6 150 mode = EVP_CIPHER_CTX_mode(ctx);
0f113f3e
MC
151 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
152 && !enc) {
6435f0f6
RL
153 ret = aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
154 &dat->ks.ks);
0f113f3e
MC
155 dat->block = (block128_f) aesni_decrypt;
156 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
157 (cbc128_f) aesni_cbc_encrypt : NULL;
158 } else {
6435f0f6
RL
159 ret = aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
160 &dat->ks.ks);
0f113f3e
MC
161 dat->block = (block128_f) aesni_encrypt;
162 if (mode == EVP_CIPH_CBC_MODE)
163 dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
164 else if (mode == EVP_CIPH_CTR_MODE)
165 dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
166 else
167 dat->stream.cbc = NULL;
168 }
169
170 if (ret < 0) {
9311d0c4 171 ERR_raise(ERR_LIB_EVP, EVP_R_AES_KEY_SETUP_FAILED);
0f113f3e
MC
172 return 0;
173 }
174
175 return 1;
176}
177
178static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
179 const unsigned char *in, size_t len)
d1fff483 180{
6435f0f6 181 aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
9197c226 182 ctx->iv, EVP_CIPHER_CTX_encrypting(ctx));
d1fff483 183
0f113f3e 184 return 1;
d1fff483
AP
185}
186
0f113f3e
MC
187static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
188 const unsigned char *in, size_t len)
d1fff483 189{
6435f0f6 190 size_t bl = EVP_CIPHER_CTX_block_size(ctx);
d1fff483 191
0f113f3e
MC
192 if (len < bl)
193 return 1;
d1fff483 194
6435f0f6
RL
195 aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
196 EVP_CIPHER_CTX_encrypting(ctx));
d1fff483 197
0f113f3e 198 return 1;
d1fff483
AP
199}
200
5158c763 201# define aesni_ofb_cipher aes_ofb_cipher
0f113f3e
MC
202static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
203 const unsigned char *in, size_t len);
d1fff483 204
5158c763 205# define aesni_cfb_cipher aes_cfb_cipher
0f113f3e
MC
206static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
207 const unsigned char *in, size_t len);
d1fff483 208
5158c763 209# define aesni_cfb8_cipher aes_cfb8_cipher
0f113f3e
MC
210static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
211 const unsigned char *in, size_t len);
d1fff483 212
5158c763 213# define aesni_cfb1_cipher aes_cfb1_cipher
0f113f3e
MC
214static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
215 const unsigned char *in, size_t len);
d1fff483 216
5158c763 217# define aesni_ctr_cipher aes_ctr_cipher
17f121de 218static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 219 const unsigned char *in, size_t len);
d1fff483 220
17f121de 221static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
222 const unsigned char *iv, int enc)
223{
6435f0f6 224 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
0f113f3e
MC
225 if (!iv && !key)
226 return 1;
227 if (key) {
6435f0f6
RL
228 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
229 &gctx->ks.ks);
0f113f3e
MC
230 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
231 gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
232 /*
233 * If we have an iv can set it directly, otherwise use saved IV.
234 */
235 if (iv == NULL && gctx->iv_set)
236 iv = gctx->iv;
237 if (iv) {
238 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
239 gctx->iv_set = 1;
240 }
241 gctx->key_set = 1;
242 } else {
243 /* If key set use IV, otherwise copy */
244 if (gctx->key_set)
245 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
246 else
247 memcpy(gctx->iv, iv, gctx->ivlen);
248 gctx->iv_set = 1;
249 gctx->iv_gen = 0;
250 }
251 return 1;
252}
253
5158c763 254# define aesni_gcm_cipher aes_gcm_cipher
17f121de 255static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 256 const unsigned char *in, size_t len);
17f121de
AP
257
258static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
259 const unsigned char *iv, int enc)
260{
6435f0f6 261 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
2c840201 262
0f113f3e
MC
263 if (!iv && !key)
264 return 1;
265
266 if (key) {
3538b0f7
P
267 /* The key is two half length keys in reality */
268 const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
269 const int bits = bytes * 8;
270
271 /*
272 * Verify that the two keys are different.
4bd8b240 273 *
3538b0f7
P
274 * This addresses Rogaway's vulnerability.
275 * See comment in aes_xts_init_key() below.
276 */
2c840201
P
277 if ((!allow_insecure_decrypt || enc)
278 && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
9311d0c4 279 ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DUPLICATED_KEYS);
3538b0f7
P
280 return 0;
281 }
282
0f113f3e
MC
283 /* key_len is two AES keys */
284 if (enc) {
3538b0f7 285 aesni_set_encrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e
MC
286 xctx->xts.block1 = (block128_f) aesni_encrypt;
287 xctx->stream = aesni_xts_encrypt;
288 } else {
3538b0f7 289 aesni_set_decrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e
MC
290 xctx->xts.block1 = (block128_f) aesni_decrypt;
291 xctx->stream = aesni_xts_decrypt;
292 }
293
3538b0f7 294 aesni_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
0f113f3e
MC
295 xctx->xts.block2 = (block128_f) aesni_encrypt;
296
297 xctx->xts.key1 = &xctx->ks1;
298 }
299
300 if (iv) {
301 xctx->xts.key2 = &xctx->ks2;
9197c226 302 memcpy(ctx->iv, iv, 16);
0f113f3e
MC
303 }
304
305 return 1;
306}
307
5158c763 308# define aesni_xts_cipher aes_xts_cipher
17f121de 309static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 310 const unsigned char *in, size_t len);
17f121de
AP
311
312static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
313 const unsigned char *iv, int enc)
314{
6435f0f6 315 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
0f113f3e
MC
316 if (!iv && !key)
317 return 1;
318 if (key) {
6435f0f6
RL
319 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
320 &cctx->ks.ks);
0f113f3e
MC
321 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
322 &cctx->ks, (block128_f) aesni_encrypt);
323 cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
324 (ccm128_f) aesni_ccm64_decrypt_blocks;
325 cctx->key_set = 1;
326 }
327 if (iv) {
9197c226 328 memcpy(ctx->iv, iv, 15 - cctx->L);
0f113f3e
MC
329 cctx->iv_set = 1;
330 }
331 return 1;
332}
333
5158c763 334# define aesni_ccm_cipher aes_ccm_cipher
17f121de 335static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 336 const unsigned char *in, size_t len);
17f121de 337
5158c763 338# ifndef OPENSSL_NO_OCB
e6b336ef 339static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
340 const unsigned char *iv, int enc)
341{
6435f0f6 342 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
0f113f3e
MC
343 if (!iv && !key)
344 return 1;
345 if (key) {
346 do {
347 /*
348 * We set both the encrypt and decrypt key here because decrypt
349 * needs both. We could possibly optimise to remove setting the
350 * decrypt for an encryption operation.
351 */
6435f0f6
RL
352 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
353 &octx->ksenc.ks);
354 aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
355 &octx->ksdec.ks);
bdc985b1
AP
356 if (!CRYPTO_ocb128_init(&octx->ocb,
357 &octx->ksenc.ks, &octx->ksdec.ks,
0f113f3e 358 (block128_f) aesni_encrypt,
bd30091c
AP
359 (block128_f) aesni_decrypt,
360 enc ? aesni_ocb_encrypt
361 : aesni_ocb_decrypt))
0f113f3e
MC
362 return 0;
363 }
364 while (0);
365
366 /*
367 * If we have an iv we can set it directly, otherwise use saved IV.
368 */
369 if (iv == NULL && octx->iv_set)
370 iv = octx->iv;
371 if (iv) {
372 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
373 != 1)
374 return 0;
375 octx->iv_set = 1;
376 }
377 octx->key_set = 1;
378 } else {
379 /* If key set use IV, otherwise copy */
380 if (octx->key_set)
381 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
382 else
383 memcpy(octx->iv, iv, octx->ivlen);
384 octx->iv_set = 1;
385 }
386 return 1;
387}
388
5158c763 389# define aesni_ocb_cipher aes_ocb_cipher
e6b336ef 390static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 391 const unsigned char *in, size_t len);
5158c763 392# endif /* OPENSSL_NO_OCB */
e6b336ef 393
5158c763 394# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
17f121de 395static const EVP_CIPHER aesni_##keylen##_##mode = { \
0f113f3e
MC
396 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
397 flags|EVP_CIPH_##MODE##_MODE, \
398 aesni_init_key, \
399 aesni_##mode##_cipher, \
400 NULL, \
401 sizeof(EVP_AES_KEY), \
402 NULL,NULL,NULL,NULL }; \
17f121de 403static const EVP_CIPHER aes_##keylen##_##mode = { \
0f113f3e
MC
404 nid##_##keylen##_##nmode,blocksize, \
405 keylen/8,ivlen, \
406 flags|EVP_CIPH_##MODE##_MODE, \
407 aes_init_key, \
408 aes_##mode##_cipher, \
409 NULL, \
410 sizeof(EVP_AES_KEY), \
411 NULL,NULL,NULL,NULL }; \
17f121de 412const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
8ca28da0 413{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
17f121de 414
5158c763 415# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
17f121de 416static const EVP_CIPHER aesni_##keylen##_##mode = { \
0f113f3e 417 nid##_##keylen##_##mode,blocksize, \
b1ceb439
TS
418 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
419 ivlen, \
0f113f3e
MC
420 flags|EVP_CIPH_##MODE##_MODE, \
421 aesni_##mode##_init_key, \
422 aesni_##mode##_cipher, \
423 aes_##mode##_cleanup, \
424 sizeof(EVP_AES_##MODE##_CTX), \
425 NULL,NULL,aes_##mode##_ctrl,NULL }; \
17f121de 426static const EVP_CIPHER aes_##keylen##_##mode = { \
0f113f3e 427 nid##_##keylen##_##mode,blocksize, \
b1ceb439
TS
428 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
429 ivlen, \
0f113f3e
MC
430 flags|EVP_CIPH_##MODE##_MODE, \
431 aes_##mode##_init_key, \
432 aes_##mode##_cipher, \
433 aes_##mode##_cleanup, \
434 sizeof(EVP_AES_##MODE##_CTX), \
435 NULL,NULL,aes_##mode##_ctrl,NULL }; \
17f121de 436const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
8ca28da0 437{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
d1fff483 438
459b15d4 439#elif defined(SPARC_AES_CAPABLE)
c5f6da54
AP
440
441static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
442 const unsigned char *iv, int enc)
443{
444 int ret, mode, bits;
6435f0f6 445 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
0f113f3e 446
6435f0f6
RL
447 mode = EVP_CIPHER_CTX_mode(ctx);
448 bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
0f113f3e
MC
449 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
450 && !enc) {
451 ret = 0;
6435f0f6 452 aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
0f113f3e
MC
453 dat->block = (block128_f) aes_t4_decrypt;
454 switch (bits) {
455 case 128:
456 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
457 (cbc128_f) aes128_t4_cbc_decrypt : NULL;
458 break;
459 case 192:
460 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
461 (cbc128_f) aes192_t4_cbc_decrypt : NULL;
462 break;
463 case 256:
464 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
465 (cbc128_f) aes256_t4_cbc_decrypt : NULL;
466 break;
467 default:
468 ret = -1;
469 }
470 } else {
471 ret = 0;
6435f0f6 472 aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
0f113f3e
MC
473 dat->block = (block128_f) aes_t4_encrypt;
474 switch (bits) {
475 case 128:
476 if (mode == EVP_CIPH_CBC_MODE)
477 dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
478 else if (mode == EVP_CIPH_CTR_MODE)
479 dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
480 else
481 dat->stream.cbc = NULL;
482 break;
483 case 192:
484 if (mode == EVP_CIPH_CBC_MODE)
485 dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
486 else if (mode == EVP_CIPH_CTR_MODE)
487 dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
488 else
489 dat->stream.cbc = NULL;
490 break;
491 case 256:
492 if (mode == EVP_CIPH_CBC_MODE)
493 dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
494 else if (mode == EVP_CIPH_CTR_MODE)
495 dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
496 else
497 dat->stream.cbc = NULL;
498 break;
499 default:
500 ret = -1;
501 }
502 }
503
504 if (ret < 0) {
9311d0c4 505 ERR_raise(ERR_LIB_EVP, EVP_R_AES_KEY_SETUP_FAILED);
0f113f3e
MC
506 return 0;
507 }
508
509 return 1;
510}
511
5158c763 512# define aes_t4_cbc_cipher aes_cbc_cipher
0f113f3e
MC
513static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
514 const unsigned char *in, size_t len);
515
5158c763 516# define aes_t4_ecb_cipher aes_ecb_cipher
0f113f3e
MC
517static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
518 const unsigned char *in, size_t len);
519
5158c763 520# define aes_t4_ofb_cipher aes_ofb_cipher
0f113f3e
MC
521static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
522 const unsigned char *in, size_t len);
523
5158c763 524# define aes_t4_cfb_cipher aes_cfb_cipher
0f113f3e
MC
525static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
526 const unsigned char *in, size_t len);
527
5158c763 528# define aes_t4_cfb8_cipher aes_cfb8_cipher
0f113f3e
MC
529static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
530 const unsigned char *in, size_t len);
531
5158c763 532# define aes_t4_cfb1_cipher aes_cfb1_cipher
0f113f3e
MC
533static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
534 const unsigned char *in, size_t len);
535
5158c763 536# define aes_t4_ctr_cipher aes_ctr_cipher
c5f6da54 537static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 538 const unsigned char *in, size_t len);
c5f6da54
AP
539
540static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
541 const unsigned char *iv, int enc)
542{
6435f0f6 543 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
0f113f3e
MC
544 if (!iv && !key)
545 return 1;
546 if (key) {
6435f0f6 547 int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
0f113f3e
MC
548 aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
549 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
550 (block128_f) aes_t4_encrypt);
551 switch (bits) {
552 case 128:
553 gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
554 break;
555 case 192:
556 gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
557 break;
558 case 256:
559 gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
560 break;
561 default:
562 return 0;
563 }
564 /*
565 * If we have an iv can set it directly, otherwise use saved IV.
566 */
567 if (iv == NULL && gctx->iv_set)
568 iv = gctx->iv;
569 if (iv) {
570 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
571 gctx->iv_set = 1;
572 }
573 gctx->key_set = 1;
574 } else {
575 /* If key set use IV, otherwise copy */
576 if (gctx->key_set)
577 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
578 else
579 memcpy(gctx->iv, iv, gctx->ivlen);
580 gctx->iv_set = 1;
581 gctx->iv_gen = 0;
582 }
583 return 1;
584}
585
5158c763 586# define aes_t4_gcm_cipher aes_gcm_cipher
c5f6da54 587static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 588 const unsigned char *in, size_t len);
c5f6da54
AP
589
590static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
591 const unsigned char *iv, int enc)
592{
6435f0f6 593 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
2c840201 594
0f113f3e
MC
595 if (!iv && !key)
596 return 1;
597
598 if (key) {
3538b0f7
P
599 /* The key is two half length keys in reality */
600 const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
601 const int bits = bytes * 8;
602
603 /*
604 * Verify that the two keys are different.
4bd8b240 605 *
3538b0f7
P
606 * This addresses Rogaway's vulnerability.
607 * See comment in aes_xts_init_key() below.
608 */
2c840201
P
609 if ((!allow_insecure_decrypt || enc)
610 && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
9311d0c4 611 ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DUPLICATED_KEYS);
3538b0f7
P
612 return 0;
613 }
614
0f113f3e
MC
615 xctx->stream = NULL;
616 /* key_len is two AES keys */
617 if (enc) {
618 aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
619 xctx->xts.block1 = (block128_f) aes_t4_encrypt;
620 switch (bits) {
621 case 128:
622 xctx->stream = aes128_t4_xts_encrypt;
623 break;
0f113f3e
MC
624 case 256:
625 xctx->stream = aes256_t4_xts_encrypt;
626 break;
627 default:
628 return 0;
629 }
630 } else {
3538b0f7 631 aes_t4_set_decrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e
MC
632 xctx->xts.block1 = (block128_f) aes_t4_decrypt;
633 switch (bits) {
634 case 128:
635 xctx->stream = aes128_t4_xts_decrypt;
636 break;
0f113f3e
MC
637 case 256:
638 xctx->stream = aes256_t4_xts_decrypt;
639 break;
640 default:
641 return 0;
642 }
643 }
644
3538b0f7 645 aes_t4_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
0f113f3e
MC
646 xctx->xts.block2 = (block128_f) aes_t4_encrypt;
647
648 xctx->xts.key1 = &xctx->ks1;
649 }
650
651 if (iv) {
652 xctx->xts.key2 = &xctx->ks2;
9197c226 653 memcpy(ctx->iv, iv, 16);
0f113f3e
MC
654 }
655
656 return 1;
657}
658
5158c763 659# define aes_t4_xts_cipher aes_xts_cipher
c5f6da54 660static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 661 const unsigned char *in, size_t len);
c5f6da54
AP
662
663static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
664 const unsigned char *iv, int enc)
665{
6435f0f6 666 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
0f113f3e
MC
667 if (!iv && !key)
668 return 1;
669 if (key) {
6435f0f6 670 int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
0f113f3e
MC
671 aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
672 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
673 &cctx->ks, (block128_f) aes_t4_encrypt);
bdc985b1 674 cctx->str = NULL;
0f113f3e
MC
675 cctx->key_set = 1;
676 }
677 if (iv) {
9197c226 678 memcpy(ctx->iv, iv, 15 - cctx->L);
0f113f3e
MC
679 cctx->iv_set = 1;
680 }
681 return 1;
682}
683
5158c763 684# define aes_t4_ccm_cipher aes_ccm_cipher
c5f6da54 685static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 686 const unsigned char *in, size_t len);
c5f6da54 687
5158c763 688# ifndef OPENSSL_NO_OCB
e6b336ef 689static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
690 const unsigned char *iv, int enc)
691{
6435f0f6 692 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
0f113f3e
MC
693 if (!iv && !key)
694 return 1;
695 if (key) {
696 do {
697 /*
698 * We set both the encrypt and decrypt key here because decrypt
699 * needs both. We could possibly optimise to remove setting the
700 * decrypt for an encryption operation.
701 */
6435f0f6
RL
702 aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
703 &octx->ksenc.ks);
704 aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
705 &octx->ksdec.ks);
bdc985b1
AP
706 if (!CRYPTO_ocb128_init(&octx->ocb,
707 &octx->ksenc.ks, &octx->ksdec.ks,
0f113f3e 708 (block128_f) aes_t4_encrypt,
02dc0b82
AP
709 (block128_f) aes_t4_decrypt,
710 NULL))
0f113f3e
MC
711 return 0;
712 }
713 while (0);
714
715 /*
716 * If we have an iv we can set it directly, otherwise use saved IV.
717 */
718 if (iv == NULL && octx->iv_set)
719 iv = octx->iv;
720 if (iv) {
721 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
722 != 1)
723 return 0;
724 octx->iv_set = 1;
725 }
726 octx->key_set = 1;
727 } else {
728 /* If key set use IV, otherwise copy */
729 if (octx->key_set)
730 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
731 else
732 memcpy(octx->iv, iv, octx->ivlen);
733 octx->iv_set = 1;
734 }
735 return 1;
736}
737
5158c763 738# define aes_t4_ocb_cipher aes_ocb_cipher
e6b336ef 739static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 740 const unsigned char *in, size_t len);
5158c763 741# endif /* OPENSSL_NO_OCB */
e6b336ef 742
87d06aed
MC
743# ifndef OPENSSL_NO_SIV
744# define aes_t4_siv_init_key aes_siv_init_key
745# define aes_t4_siv_cipher aes_siv_cipher
746# endif /* OPENSSL_NO_SIV */
747
5158c763 748# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
c5f6da54 749static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
0f113f3e
MC
750 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
751 flags|EVP_CIPH_##MODE##_MODE, \
752 aes_t4_init_key, \
753 aes_t4_##mode##_cipher, \
754 NULL, \
755 sizeof(EVP_AES_KEY), \
756 NULL,NULL,NULL,NULL }; \
c5f6da54 757static const EVP_CIPHER aes_##keylen##_##mode = { \
0f113f3e
MC
758 nid##_##keylen##_##nmode,blocksize, \
759 keylen/8,ivlen, \
760 flags|EVP_CIPH_##MODE##_MODE, \
761 aes_init_key, \
762 aes_##mode##_cipher, \
763 NULL, \
764 sizeof(EVP_AES_KEY), \
765 NULL,NULL,NULL,NULL }; \
c5f6da54
AP
766const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
767{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
768
5158c763 769# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
c5f6da54 770static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
0f113f3e 771 nid##_##keylen##_##mode,blocksize, \
b1ceb439
TS
772 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
773 ivlen, \
0f113f3e
MC
774 flags|EVP_CIPH_##MODE##_MODE, \
775 aes_t4_##mode##_init_key, \
776 aes_t4_##mode##_cipher, \
777 aes_##mode##_cleanup, \
778 sizeof(EVP_AES_##MODE##_CTX), \
779 NULL,NULL,aes_##mode##_ctrl,NULL }; \
c5f6da54 780static const EVP_CIPHER aes_##keylen##_##mode = { \
0f113f3e 781 nid##_##keylen##_##mode,blocksize, \
b1ceb439
TS
782 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
783 ivlen, \
0f113f3e
MC
784 flags|EVP_CIPH_##MODE##_MODE, \
785 aes_##mode##_init_key, \
786 aes_##mode##_cipher, \
787 aes_##mode##_cleanup, \
788 sizeof(EVP_AES_##MODE##_CTX), \
789 NULL,NULL,aes_##mode##_ctrl,NULL }; \
c5f6da54
AP
790const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
791{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
792
459b15d4
SL
793#elif defined(S390X_aes_128_CAPABLE)
794/* IBM S390X support */
55bd169f
PS
795typedef struct {
796 union {
39147079 797 OSSL_UNION_ALIGN;
55bd169f
PS
798 /*-
799 * KM-AES parameter block - begin
800 * (see z/Architecture Principles of Operation >= SA22-7832-06)
801 */
802 struct {
803 unsigned char k[32];
804 } param;
805 /* KM-AES parameter block - end */
806 } km;
807 unsigned int fc;
808} S390X_AES_ECB_CTX;
809
dacd2a87
PS
810typedef struct {
811 union {
39147079 812 OSSL_UNION_ALIGN;
dacd2a87
PS
813 /*-
814 * KMO-AES parameter block - begin
815 * (see z/Architecture Principles of Operation >= SA22-7832-08)
816 */
817 struct {
818 unsigned char cv[16];
819 unsigned char k[32];
820 } param;
821 /* KMO-AES parameter block - end */
822 } kmo;
823 unsigned int fc;
824
825 int res;
826} S390X_AES_OFB_CTX;
827
74d38a86
PS
828typedef struct {
829 union {
39147079 830 OSSL_UNION_ALIGN;
74d38a86
PS
831 /*-
832 * KMF-AES parameter block - begin
833 * (see z/Architecture Principles of Operation >= SA22-7832-08)
834 */
835 struct {
836 unsigned char cv[16];
837 unsigned char k[32];
838 } param;
839 /* KMF-AES parameter block - end */
840 } kmf;
841 unsigned int fc;
842
843 int res;
844} S390X_AES_CFB_CTX;
845
96530eea
PS
846typedef struct {
847 union {
39147079 848 OSSL_UNION_ALIGN;
96530eea 849 /*-
5d2a6f4b
PS
850 * KMA-GCM-AES parameter block - begin
851 * (see z/Architecture Principles of Operation >= SA22-7832-11)
96530eea
PS
852 */
853 struct {
854 unsigned char reserved[12];
855 union {
856 unsigned int w;
857 unsigned char b[4];
858 } cv;
859 union {
860 unsigned long long g[2];
861 unsigned char b[16];
862 } t;
863 unsigned char h[16];
864 unsigned long long taadl;
865 unsigned long long tpcl;
866 union {
867 unsigned long long g[2];
868 unsigned int w[4];
869 } j0;
870 unsigned char k[32];
871 } param;
5d2a6f4b 872 /* KMA-GCM-AES parameter block - end */
96530eea
PS
873 } kma;
874 unsigned int fc;
875 int key_set;
876
877 unsigned char *iv;
878 int ivlen;
879 int iv_set;
880 int iv_gen;
881
882 int taglen;
883
884 unsigned char ares[16];
885 unsigned char mres[16];
886 unsigned char kres[16];
887 int areslen;
888 int mreslen;
889 int kreslen;
890
891 int tls_aad_len;
d6b34570 892 uint64_t tls_enc_records; /* Number of TLS records encrypted */
96530eea
PS
893} S390X_AES_GCM_CTX;
894
39f5b069
PS
895typedef struct {
896 union {
39147079 897 OSSL_UNION_ALIGN;
39f5b069
PS
898 /*-
899 * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
900 * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
901 * rounds field is used to store the function code and that the key
902 * schedule is not stored (if aes hardware support is detected).
903 */
904 struct {
905 unsigned char pad[16];
906 AES_KEY k;
907 } key;
908
909 struct {
910 /*-
911 * KMAC-AES parameter block - begin
912 * (see z/Architecture Principles of Operation >= SA22-7832-08)
913 */
914 struct {
915 union {
916 unsigned long long g[2];
917 unsigned char b[16];
918 } icv;
919 unsigned char k[32];
920 } kmac_param;
79c44b4e 921 /* KMAC-AES parameter block - end */
39f5b069
PS
922
923 union {
924 unsigned long long g[2];
925 unsigned char b[16];
926 } nonce;
927 union {
928 unsigned long long g[2];
929 unsigned char b[16];
930 } buf;
931
932 unsigned long long blocks;
933 int l;
934 int m;
935 int tls_aad_len;
936 int iv_set;
937 int tag_set;
938 int len_set;
939 int key_set;
940
941 unsigned char pad[140];
942 unsigned int fc;
943 } ccm;
944 } aes;
945} S390X_AES_CCM_CTX;
946
96530eea
PS
947# define s390x_aes_init_key aes_init_key
948static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
949 const unsigned char *iv, int enc);
950
dd6b2706 951# define S390X_AES_CBC_CTX EVP_AES_KEY
55bd169f
PS
952
953# define s390x_aes_cbc_init_key aes_init_key
96530eea
PS
954
955# define s390x_aes_cbc_cipher aes_cbc_cipher
956static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
957 const unsigned char *in, size_t len);
958
55bd169f
PS
959static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx,
960 const unsigned char *key,
961 const unsigned char *iv, int enc)
962{
963 S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
964 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
965
966 cctx->fc = S390X_AES_FC(keylen);
967 if (!enc)
968 cctx->fc |= S390X_DECRYPT;
969
970 memcpy(cctx->km.param.k, key, keylen);
971 return 1;
972}
96530eea 973
96530eea 974static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
55bd169f
PS
975 const unsigned char *in, size_t len)
976{
977 S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
978
979 s390x_km(in, len, out, cctx->fc, &cctx->km.param);
980 return 1;
981}
96530eea 982
dacd2a87
PS
983static int s390x_aes_ofb_init_key(EVP_CIPHER_CTX *ctx,
984 const unsigned char *key,
985 const unsigned char *ivec, int enc)
986{
987 S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
9197c226 988 const unsigned char *iv = ctx->oiv;
dacd2a87
PS
989 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
990 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
55bd169f 991
dacd2a87
PS
992 memcpy(cctx->kmo.param.cv, iv, ivlen);
993 memcpy(cctx->kmo.param.k, key, keylen);
994 cctx->fc = S390X_AES_FC(keylen);
995 cctx->res = 0;
996 return 1;
997}
96530eea 998
96530eea 999static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
dacd2a87
PS
1000 const unsigned char *in, size_t len)
1001{
1002 S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
1003 int n = cctx->res;
1004 int rem;
1005
1006 while (n && len) {
1007 *out = *in ^ cctx->kmo.param.cv[n];
1008 n = (n + 1) & 0xf;
1009 --len;
1010 ++in;
1011 ++out;
1012 }
1013
1014 rem = len & 0xf;
1015
1016 len &= ~(size_t)0xf;
1017 if (len) {
1018 s390x_kmo(in, len, out, cctx->fc, &cctx->kmo.param);
1019
1020 out += len;
1021 in += len;
1022 }
1023
1024 if (rem) {
1025 s390x_km(cctx->kmo.param.cv, 16, cctx->kmo.param.cv, cctx->fc,
1026 cctx->kmo.param.k);
1027
1028 while (rem--) {
1029 out[n] = in[n] ^ cctx->kmo.param.cv[n];
1030 ++n;
1031 }
1032 }
1033
1034 cctx->res = n;
1035 return 1;
1036}
96530eea 1037
74d38a86
PS
1038static int s390x_aes_cfb_init_key(EVP_CIPHER_CTX *ctx,
1039 const unsigned char *key,
1040 const unsigned char *ivec, int enc)
1041{
1042 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
9197c226 1043 const unsigned char *iv = ctx->oiv;
74d38a86
PS
1044 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1045 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
1046
1047 cctx->fc = S390X_AES_FC(keylen);
1048 cctx->fc |= 16 << 24; /* 16 bytes cipher feedback */
1049 if (!enc)
1050 cctx->fc |= S390X_DECRYPT;
55bd169f 1051
74d38a86
PS
1052 cctx->res = 0;
1053 memcpy(cctx->kmf.param.cv, iv, ivlen);
1054 memcpy(cctx->kmf.param.k, key, keylen);
1055 return 1;
1056}
96530eea 1057
96530eea 1058static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
74d38a86
PS
1059 const unsigned char *in, size_t len)
1060{
1061 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1062 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1063 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
1064 int n = cctx->res;
1065 int rem;
1066 unsigned char tmp;
1067
1068 while (n && len) {
1069 tmp = *in;
1070 *out = cctx->kmf.param.cv[n] ^ tmp;
1071 cctx->kmf.param.cv[n] = enc ? *out : tmp;
1072 n = (n + 1) & 0xf;
1073 --len;
1074 ++in;
1075 ++out;
1076 }
1077
1078 rem = len & 0xf;
1079
1080 len &= ~(size_t)0xf;
1081 if (len) {
1082 s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
1083
1084 out += len;
1085 in += len;
1086 }
1087
1088 if (rem) {
1089 s390x_km(cctx->kmf.param.cv, 16, cctx->kmf.param.cv,
1090 S390X_AES_FC(keylen), cctx->kmf.param.k);
1091
1092 while (rem--) {
1093 tmp = in[n];
1094 out[n] = cctx->kmf.param.cv[n] ^ tmp;
1095 cctx->kmf.param.cv[n] = enc ? out[n] : tmp;
1096 ++n;
1097 }
1098 }
96530eea 1099
74d38a86
PS
1100 cctx->res = n;
1101 return 1;
1102}
1103
74d38a86
PS
1104static int s390x_aes_cfb8_init_key(EVP_CIPHER_CTX *ctx,
1105 const unsigned char *key,
1106 const unsigned char *ivec, int enc)
1107{
1108 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
9197c226 1109 const unsigned char *iv = ctx->oiv;
74d38a86
PS
1110 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1111 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
1112
1113 cctx->fc = S390X_AES_FC(keylen);
1114 cctx->fc |= 1 << 24; /* 1 byte cipher feedback */
1115 if (!enc)
1116 cctx->fc |= S390X_DECRYPT;
96530eea 1117
74d38a86
PS
1118 memcpy(cctx->kmf.param.cv, iv, ivlen);
1119 memcpy(cctx->kmf.param.k, key, keylen);
1120 return 1;
1121}
55bd169f 1122
96530eea 1123static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
74d38a86
PS
1124 const unsigned char *in, size_t len)
1125{
1126 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1127
1128 s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
1129 return 1;
1130}
96530eea 1131
55bd169f
PS
1132# define s390x_aes_cfb1_init_key aes_init_key
1133
96530eea
PS
1134# define s390x_aes_cfb1_cipher aes_cfb1_cipher
1135static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1136 const unsigned char *in, size_t len);
1137
dd6b2706 1138# define S390X_AES_CTR_CTX EVP_AES_KEY
55bd169f
PS
1139
1140# define s390x_aes_ctr_init_key aes_init_key
96530eea
PS
1141
1142# define s390x_aes_ctr_cipher aes_ctr_cipher
1143static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1144 const unsigned char *in, size_t len);
1145
bcf082d1 1146/* iv + padding length for iv lengths != 12 */
dd6b2706 1147# define S390X_gcm_ivpadlen(i) ((((i) + 15) >> 4 << 4) + 16)
96530eea 1148
5d2a6f4b
PS
1149/*-
1150 * Process additional authenticated data. Returns 0 on success. Code is
1151 * big-endian.
1152 */
96530eea
PS
1153static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
1154 size_t len)
1155{
1156 unsigned long long alen;
1157 int n, rem;
1158
1159 if (ctx->kma.param.tpcl)
1160 return -2;
1161
1162 alen = ctx->kma.param.taadl + len;
1163 if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
1164 return -1;
1165 ctx->kma.param.taadl = alen;
1166
1167 n = ctx->areslen;
1168 if (n) {
1169 while (n && len) {
1170 ctx->ares[n] = *aad;
1171 n = (n + 1) & 0xf;
1172 ++aad;
1173 --len;
1174 }
1175 /* ctx->ares contains a complete block if offset has wrapped around */
1176 if (!n) {
1177 s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
1178 ctx->fc |= S390X_KMA_HS;
1179 }
1180 ctx->areslen = n;
1181 }
1182
1183 rem = len & 0xf;
1184
25868993 1185 len &= ~(size_t)0xf;
96530eea
PS
1186 if (len) {
1187 s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
1188 aad += len;
1189 ctx->fc |= S390X_KMA_HS;
1190 }
1191
1192 if (rem) {
1193 ctx->areslen = rem;
1194
1195 do {
1196 --rem;
1197 ctx->ares[rem] = aad[rem];
1198 } while (rem);
1199 }
1200 return 0;
1201}
1202
5d2a6f4b
PS
1203/*-
1204 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
1205 * success. Code is big-endian.
1206 */
96530eea
PS
1207static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
1208 unsigned char *out, size_t len)
1209{
1210 const unsigned char *inptr;
1211 unsigned long long mlen;
1212 union {
1213 unsigned int w[4];
1214 unsigned char b[16];
1215 } buf;
1216 size_t inlen;
1217 int n, rem, i;
1218
1219 mlen = ctx->kma.param.tpcl + len;
1220 if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
1221 return -1;
1222 ctx->kma.param.tpcl = mlen;
1223
1224 n = ctx->mreslen;
1225 if (n) {
1226 inptr = in;
1227 inlen = len;
1228 while (n && inlen) {
1229 ctx->mres[n] = *inptr;
1230 n = (n + 1) & 0xf;
1231 ++inptr;
1232 --inlen;
1233 }
1234 /* ctx->mres contains a complete block if offset has wrapped around */
1235 if (!n) {
1236 s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
1237 ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
1238 ctx->fc |= S390X_KMA_HS;
1239 ctx->areslen = 0;
1240
1241 /* previous call already encrypted/decrypted its remainder,
1242 * see comment below */
1243 n = ctx->mreslen;
1244 while (n) {
1245 *out = buf.b[n];
1246 n = (n + 1) & 0xf;
1247 ++out;
1248 ++in;
1249 --len;
1250 }
1251 ctx->mreslen = 0;
1252 }
1253 }
1254
1255 rem = len & 0xf;
1256
25868993 1257 len &= ~(size_t)0xf;
96530eea
PS
1258 if (len) {
1259 s390x_kma(ctx->ares, ctx->areslen, in, len, out,
1260 ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
1261 in += len;
1262 out += len;
1263 ctx->fc |= S390X_KMA_HS;
1264 ctx->areslen = 0;
1265 }
1266
1267 /*-
1268 * If there is a remainder, it has to be saved such that it can be
1269 * processed by kma later. However, we also have to do the for-now
1270 * unauthenticated encryption/decryption part here and now...
1271 */
1272 if (rem) {
1273 if (!ctx->mreslen) {
1274 buf.w[0] = ctx->kma.param.j0.w[0];
1275 buf.w[1] = ctx->kma.param.j0.w[1];
1276 buf.w[2] = ctx->kma.param.j0.w[2];
1277 buf.w[3] = ctx->kma.param.cv.w + 1;
1278 s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);
1279 }
1280
1281 n = ctx->mreslen;
1282 for (i = 0; i < rem; i++) {
1283 ctx->mres[n + i] = in[i];
1284 out[i] = in[i] ^ ctx->kres[n + i];
1285 }
1286
1287 ctx->mreslen += rem;
1288 }
1289 return 0;
1290}
1291
5d2a6f4b
PS
1292/*-
1293 * Initialize context structure. Code is big-endian.
1294 */
96530eea
PS
1295static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx,
1296 const unsigned char *iv)
1297{
1298 ctx->kma.param.t.g[0] = 0;
1299 ctx->kma.param.t.g[1] = 0;
1300 ctx->kma.param.tpcl = 0;
1301 ctx->kma.param.taadl = 0;
1302 ctx->mreslen = 0;
1303 ctx->areslen = 0;
1304 ctx->kreslen = 0;
1305
1306 if (ctx->ivlen == 12) {
1307 memcpy(&ctx->kma.param.j0, iv, ctx->ivlen);
1308 ctx->kma.param.j0.w[3] = 1;
1309 ctx->kma.param.cv.w = 1;
1310 } else {
1311 /* ctx->iv has the right size and is already padded. */
1312 memcpy(ctx->iv, iv, ctx->ivlen);
1313 s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
1314 ctx->fc, &ctx->kma.param);
1315 ctx->fc |= S390X_KMA_HS;
1316
1317 ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
1318 ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
1319 ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
1320 ctx->kma.param.t.g[0] = 0;
1321 ctx->kma.param.t.g[1] = 0;
1322 }
1323}
1324
5d2a6f4b
PS
1325/*-
1326 * Performs various operations on the context structure depending on control
1327 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
1328 * Code is big-endian.
1329 */
96530eea
PS
1330static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
1331{
1332 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
1333 S390X_AES_GCM_CTX *gctx_out;
1334 EVP_CIPHER_CTX *out;
9197c226 1335 unsigned char *buf;
96530eea
PS
1336 int ivlen, enc, len;
1337
1338 switch (type) {
1339 case EVP_CTRL_INIT:
7dddf2fc 1340 ivlen = EVP_CIPHER_iv_length(c->cipher);
96530eea
PS
1341 gctx->key_set = 0;
1342 gctx->iv_set = 0;
1343 gctx->ivlen = ivlen;
9197c226 1344 gctx->iv = c->iv;
96530eea
PS
1345 gctx->taglen = -1;
1346 gctx->iv_gen = 0;
1347 gctx->tls_aad_len = -1;
1348 return 1;
1349
7dddf2fc
SL
1350 case EVP_CTRL_GET_IVLEN:
1351 *(int *)ptr = gctx->ivlen;
1352 return 1;
1353
96530eea
PS
1354 case EVP_CTRL_AEAD_SET_IVLEN:
1355 if (arg <= 0)
1356 return 0;
1357
1358 if (arg != 12) {
96530eea
PS
1359 len = S390X_gcm_ivpadlen(arg);
1360
1361 /* Allocate memory for iv if needed. */
1362 if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
9197c226 1363 if (gctx->iv != c->iv)
96530eea
PS
1364 OPENSSL_free(gctx->iv);
1365
cdb10bae 1366 if ((gctx->iv = OPENSSL_malloc(len)) == NULL) {
9311d0c4 1367 ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
96530eea 1368 return 0;
cdb10bae 1369 }
96530eea
PS
1370 }
1371 /* Add padding. */
1372 memset(gctx->iv + arg, 0, len - arg - 8);
1373 *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;
1374 }
1375 gctx->ivlen = arg;
1376 return 1;
1377
1378 case EVP_CTRL_AEAD_SET_TAG:
1379 buf = EVP_CIPHER_CTX_buf_noconst(c);
1380 enc = EVP_CIPHER_CTX_encrypting(c);
1381 if (arg <= 0 || arg > 16 || enc)
1382 return 0;
1383
1384 memcpy(buf, ptr, arg);
1385 gctx->taglen = arg;
1386 return 1;
1387
1388 case EVP_CTRL_AEAD_GET_TAG:
1389 enc = EVP_CIPHER_CTX_encrypting(c);
1390 if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
1391 return 0;
1392
1393 memcpy(ptr, gctx->kma.param.t.b, arg);
1394 return 1;
1395
1396 case EVP_CTRL_GCM_SET_IV_FIXED:
1397 /* Special case: -1 length restores whole iv */
1398 if (arg == -1) {
1399 memcpy(gctx->iv, ptr, gctx->ivlen);
1400 gctx->iv_gen = 1;
1401 return 1;
1402 }
1403 /*
1404 * Fixed field must be at least 4 bytes and invocation field at least
1405 * 8.
1406 */
1407 if ((arg < 4) || (gctx->ivlen - arg) < 8)
1408 return 0;
1409
1410 if (arg)
1411 memcpy(gctx->iv, ptr, arg);
1412
1413 enc = EVP_CIPHER_CTX_encrypting(c);
16cfc2c9
KR
1414 if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
1415 return 0;
96530eea
PS
1416
1417 gctx->iv_gen = 1;
1418 return 1;
1419
1420 case EVP_CTRL_GCM_IV_GEN:
1421 if (gctx->iv_gen == 0 || gctx->key_set == 0)
1422 return 0;
1423
1424 s390x_aes_gcm_setiv(gctx, gctx->iv);
1425
1426 if (arg <= 0 || arg > gctx->ivlen)
1427 arg = gctx->ivlen;
1428
1429 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
1430 /*
1431 * Invocation field will be at least 8 bytes in size and so no need
1432 * to check wrap around or increment more than last 8 bytes.
1433 */
03a5e5ae 1434 ctr64_inc(gctx->iv + gctx->ivlen - 8);
96530eea
PS
1435 gctx->iv_set = 1;
1436 return 1;
1437
1438 case EVP_CTRL_GCM_SET_IV_INV:
1439 enc = EVP_CIPHER_CTX_encrypting(c);
1440 if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
1441 return 0;
1442
1443 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
1444 s390x_aes_gcm_setiv(gctx, gctx->iv);
1445 gctx->iv_set = 1;
1446 return 1;
1447
1448 case EVP_CTRL_AEAD_TLS1_AAD:
1449 /* Save the aad for later use. */
1450 if (arg != EVP_AEAD_TLS1_AAD_LEN)
1451 return 0;
1452
1453 buf = EVP_CIPHER_CTX_buf_noconst(c);
1454 memcpy(buf, ptr, arg);
1455 gctx->tls_aad_len = arg;
d6b34570 1456 gctx->tls_enc_records = 0;
96530eea
PS
1457
1458 len = buf[arg - 2] << 8 | buf[arg - 1];
1459 /* Correct length for explicit iv. */
1460 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
1461 return 0;
1462 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
1463
1464 /* If decrypting correct for tag too. */
1465 enc = EVP_CIPHER_CTX_encrypting(c);
1466 if (!enc) {
1467 if (len < EVP_GCM_TLS_TAG_LEN)
1468 return 0;
1469 len -= EVP_GCM_TLS_TAG_LEN;
1470 }
1471 buf[arg - 2] = len >> 8;
1472 buf[arg - 1] = len & 0xff;
1473 /* Extra padding: tag appended to record. */
1474 return EVP_GCM_TLS_TAG_LEN;
1475
1476 case EVP_CTRL_COPY:
1477 out = ptr;
1478 gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);
96530eea 1479
9197c226
BK
1480 if (gctx->iv == c->iv) {
1481 gctx_out->iv = out->iv;
96530eea
PS
1482 } else {
1483 len = S390X_gcm_ivpadlen(gctx->ivlen);
1484
cdb10bae 1485 if ((gctx_out->iv = OPENSSL_malloc(len)) == NULL) {
9311d0c4 1486 ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
96530eea 1487 return 0;
cdb10bae 1488 }
96530eea
PS
1489
1490 memcpy(gctx_out->iv, gctx->iv, len);
1491 }
1492 return 1;
1493
1494 default:
1495 return -1;
1496 }
1497}
1498
5d2a6f4b
PS
1499/*-
1500 * Set key and/or iv. Returns 1 on success. Otherwise 0 is returned.
1501 */
96530eea
PS
1502static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
1503 const unsigned char *key,
1504 const unsigned char *iv, int enc)
1505{
1506 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1507 int keylen;
1508
1509 if (iv == NULL && key == NULL)
1510 return 1;
1511
1512 if (key != NULL) {
1513 keylen = EVP_CIPHER_CTX_key_length(ctx);
1514 memcpy(&gctx->kma.param.k, key, keylen);
1515
8eb399fb 1516 gctx->fc = S390X_AES_FC(keylen);
96530eea
PS
1517 if (!enc)
1518 gctx->fc |= S390X_DECRYPT;
1519
1520 if (iv == NULL && gctx->iv_set)
1521 iv = gctx->iv;
1522
1523 if (iv != NULL) {
1524 s390x_aes_gcm_setiv(gctx, iv);
1525 gctx->iv_set = 1;
1526 }
1527 gctx->key_set = 1;
1528 } else {
1529 if (gctx->key_set)
1530 s390x_aes_gcm_setiv(gctx, iv);
1531 else
1532 memcpy(gctx->iv, iv, gctx->ivlen);
1533
1534 gctx->iv_set = 1;
1535 gctx->iv_gen = 0;
1536 }
1537 return 1;
1538}
1539
5d2a6f4b
PS
1540/*-
1541 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1542 * if successful. Otherwise -1 is returned. Code is big-endian.
1543 */
96530eea
PS
1544static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1545 const unsigned char *in, size_t len)
1546{
1547 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1548 const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1549 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
1550 int rv = -1;
1551
1552 if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
1553 return -1;
1554
d6b34570
P
1555 /*
1556 * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
1557 * Requirements from SP 800-38D". The requirements is for one party to the
1558 * communication to fail after 2^64 - 1 keys. We do this on the encrypting
1559 * side only.
1560 */
1561 if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
9311d0c4 1562 ERR_raise(ERR_LIB_EVP, EVP_R_TOO_MANY_RECORDS);
d6b34570
P
1563 goto err;
1564 }
1565
96530eea
PS
1566 if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN
1567 : EVP_CTRL_GCM_SET_IV_INV,
1568 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
1569 goto err;
1570
1571 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1572 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1573 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
1574
1575 gctx->kma.param.taadl = gctx->tls_aad_len << 3;
1576 gctx->kma.param.tpcl = len << 3;
1577 s390x_kma(buf, gctx->tls_aad_len, in, len, out,
1578 gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
1579
1580 if (enc) {
1581 memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
1582 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
1583 } else {
1584 if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
1585 EVP_GCM_TLS_TAG_LEN)) {
1586 OPENSSL_cleanse(out, len);
1587 goto err;
1588 }
1589 rv = len;
1590 }
1591err:
1592 gctx->iv_set = 0;
1593 gctx->tls_aad_len = -1;
1594 return rv;
1595}
1596
5d2a6f4b
PS
1597/*-
1598 * Called from EVP layer to initialize context, process additional
1599 * authenticated data, en/de-crypt plain/cipher-text and authenticate
1600 * ciphertext or process a TLS packet, depending on context. Returns bytes
1601 * written on success. Otherwise -1 is returned. Code is big-endian.
1602 */
96530eea
PS
1603static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1604 const unsigned char *in, size_t len)
1605{
1606 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1607 unsigned char *buf, tmp[16];
1608 int enc;
1609
1610 if (!gctx->key_set)
1611 return -1;
1612
1613 if (gctx->tls_aad_len >= 0)
1614 return s390x_aes_gcm_tls_cipher(ctx, out, in, len);
1615
1616 if (!gctx->iv_set)
1617 return -1;
1618
1619 if (in != NULL) {
1620 if (out == NULL) {
1621 if (s390x_aes_gcm_aad(gctx, in, len))
1622 return -1;
1623 } else {
1624 if (s390x_aes_gcm(gctx, in, out, len))
1625 return -1;
1626 }
1627 return len;
1628 } else {
1629 gctx->kma.param.taadl <<= 3;
1630 gctx->kma.param.tpcl <<= 3;
1631 s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
1632 gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
1633 /* recall that we already did en-/decrypt gctx->mres
1634 * and returned it to caller... */
1635 OPENSSL_cleanse(tmp, gctx->mreslen);
1636 gctx->iv_set = 0;
1637
1638 enc = EVP_CIPHER_CTX_encrypting(ctx);
1639 if (enc) {
1640 gctx->taglen = 16;
1641 } else {
1642 if (gctx->taglen < 0)
1643 return -1;
1644
1645 buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1646 if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
1647 return -1;
1648 }
1649 return 0;
1650 }
1651}
1652
1653static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
1654{
1655 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
96530eea
PS
1656
1657 if (gctx == NULL)
1658 return 0;
1659
9197c226 1660 if (gctx->iv != c->iv)
96530eea
PS
1661 OPENSSL_free(gctx->iv);
1662
1663 OPENSSL_cleanse(gctx, sizeof(*gctx));
1664 return 1;
1665}
1666
dd6b2706 1667# define S390X_AES_XTS_CTX EVP_AES_XTS_CTX
96530eea
PS
1668
1669# define s390x_aes_xts_init_key aes_xts_init_key
1670static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
1671 const unsigned char *key,
1672 const unsigned char *iv, int enc);
1673# define s390x_aes_xts_cipher aes_xts_cipher
1674static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1675 const unsigned char *in, size_t len);
1676# define s390x_aes_xts_ctrl aes_xts_ctrl
1677static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
1678# define s390x_aes_xts_cleanup aes_xts_cleanup
1679
39f5b069
PS
1680/*-
1681 * Set nonce and length fields. Code is big-endian.
1682 */
1683static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
1684 const unsigned char *nonce,
1685 size_t mlen)
1686{
1687 ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
1688 ctx->aes.ccm.nonce.g[1] = mlen;
1689 memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);
1690}
1691
1692/*-
1693 * Process additional authenticated data. Code is big-endian.
1694 */
1695static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,
1696 size_t alen)
1697{
1698 unsigned char *ptr;
1699 int i, rem;
1700
1701 if (!alen)
1702 return;
1703
1704 ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;
1705
1706 /* Suppress 'type-punned pointer dereference' warning. */
1707 ptr = ctx->aes.ccm.buf.b;
1708
1709 if (alen < ((1 << 16) - (1 << 8))) {
1710 *(uint16_t *)ptr = alen;
1711 i = 2;
1712 } else if (sizeof(alen) == 8
1713 && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) {
1714 *(uint16_t *)ptr = 0xffff;
1715 *(uint64_t *)(ptr + 2) = alen;
1716 i = 10;
1717 } else {
1718 *(uint16_t *)ptr = 0xfffe;
1719 *(uint32_t *)(ptr + 2) = alen;
1720 i = 6;
1721 }
1722
1723 while (i < 16 && alen) {
1724 ctx->aes.ccm.buf.b[i] = *aad;
1725 ++aad;
1726 --alen;
1727 ++i;
1728 }
1729 while (i < 16) {
1730 ctx->aes.ccm.buf.b[i] = 0;
1731 ++i;
1732 }
1733
1734 ctx->aes.ccm.kmac_param.icv.g[0] = 0;
1735 ctx->aes.ccm.kmac_param.icv.g[1] = 0;
1736 s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc,
1737 &ctx->aes.ccm.kmac_param);
1738 ctx->aes.ccm.blocks += 2;
1739
1740 rem = alen & 0xf;
25868993 1741 alen &= ~(size_t)0xf;
39f5b069
PS
1742 if (alen) {
1743 s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1744 ctx->aes.ccm.blocks += alen >> 4;
1745 aad += alen;
1746 }
1747 if (rem) {
1748 for (i = 0; i < rem; i++)
1749 ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i];
1750
1751 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1752 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1753 ctx->aes.ccm.kmac_param.k);
1754 ctx->aes.ccm.blocks++;
1755 }
1756}
1757
1758/*-
1759 * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for
1760 * success.
1761 */
1762static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in,
1763 unsigned char *out, size_t len, int enc)
1764{
1765 size_t n, rem;
1766 unsigned int i, l, num;
1767 unsigned char flags;
1768
1769 flags = ctx->aes.ccm.nonce.b[0];
1770 if (!(flags & S390X_CCM_AAD_FLAG)) {
1771 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b,
1772 ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k);
1773 ctx->aes.ccm.blocks++;
1774 }
1775 l = flags & 0x7;
1776 ctx->aes.ccm.nonce.b[0] = l;
1777
1778 /*-
1779 * Reconstruct length from encoded length field
1780 * and initialize it with counter value.
1781 */
1782 n = 0;
1783 for (i = 15 - l; i < 15; i++) {
1784 n |= ctx->aes.ccm.nonce.b[i];
1785 ctx->aes.ccm.nonce.b[i] = 0;
1786 n <<= 8;
1787 }
1788 n |= ctx->aes.ccm.nonce.b[15];
1789 ctx->aes.ccm.nonce.b[15] = 1;
1790
1791 if (n != len)
dd6b2706 1792 return -1; /* length mismatch */
39f5b069
PS
1793
1794 if (enc) {
1795 /* Two operations per block plus one for tag encryption */
1796 ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1;
1797 if (ctx->aes.ccm.blocks > (1ULL << 61))
dd6b2706 1798 return -2; /* too much data */
39f5b069
PS
1799 }
1800
1801 num = 0;
1802 rem = len & 0xf;
25868993 1803 len &= ~(size_t)0xf;
39f5b069
PS
1804
1805 if (enc) {
1806 /* mac-then-encrypt */
1807 if (len)
1808 s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1809 if (rem) {
1810 for (i = 0; i < rem; i++)
1811 ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i];
1812
1813 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1814 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1815 ctx->aes.ccm.kmac_param.k);
1816 }
1817
1818 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
1819 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
1820 &num, (ctr128_f)AES_ctr32_encrypt);
1821 } else {
1822 /* decrypt-then-mac */
1823 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
1824 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
1825 &num, (ctr128_f)AES_ctr32_encrypt);
1826
1827 if (len)
1828 s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1829 if (rem) {
1830 for (i = 0; i < rem; i++)
1831 ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i];
1832
1833 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1834 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1835 ctx->aes.ccm.kmac_param.k);
1836 }
1837 }
1838 /* encrypt tag */
1839 for (i = 15 - l; i < 16; i++)
1840 ctx->aes.ccm.nonce.b[i] = 0;
1841
1842 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc,
1843 ctx->aes.ccm.kmac_param.k);
1844 ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0];
1845 ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1];
1846
dd6b2706 1847 ctx->aes.ccm.nonce.b[0] = flags; /* restore flags field */
39f5b069
PS
1848 return 0;
1849}
1850
1851/*-
1852 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1853 * if successful. Otherwise -1 is returned.
1854 */
1855static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1856 const unsigned char *in, size_t len)
1857{
1858 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
9197c226 1859 unsigned char *ivec = ctx->iv;
39f5b069
PS
1860 unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1861 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
1862
1863 if (out != in
1864 || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m))
1865 return -1;
1866
1867 if (enc) {
1868 /* Set explicit iv (sequence number). */
1869 memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN);
1870 }
1871
1872 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
1873 /*-
1874 * Get explicit iv (sequence number). We already have fixed iv
1875 * (server/client_write_iv) here.
1876 */
1877 memcpy(ivec + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN);
1878 s390x_aes_ccm_setiv(cctx, ivec, len);
1879
1880 /* Process aad (sequence number|type|version|length) */
1881 s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len);
1882
1883 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
1884 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
96530eea 1885
39f5b069
PS
1886 if (enc) {
1887 if (s390x_aes_ccm(cctx, in, out, len, enc))
1888 return -1;
1889
1890 memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
1891 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
1892 } else {
1893 if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
1894 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len,
1895 cctx->aes.ccm.m))
1896 return len;
1897 }
1898
1899 OPENSSL_cleanse(out, len);
1900 return -1;
1901 }
1902}
1903
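On the wire, the TLS 1.2 CCM record handled above is the 8-byte explicit IV (the sequence number), the encrypted payload and the M-byte tag, processed in place. A small sketch of the same length bookkeeping, using the EVP constant the function relies on; the tag length m is whatever was negotiated, typically 8 or 16:

#include <stddef.h>
#include <openssl/evp.h>

/* Returns the payload length hidden inside a TLS1.2 AES-CCM record of
 * total length reclen carrying an m-byte tag, or -1 if it is too short. */
static int ccm_tls_payload_len(size_t reclen, size_t m)
{
    if (reclen < EVP_CCM_TLS_EXPLICIT_IV_LEN + m)
        return -1;
    return (int)(reclen - EVP_CCM_TLS_EXPLICIT_IV_LEN - m);
}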
1904/*-
1905 * Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is
1906 * returned.
1907 */
96530eea
PS
1908static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
1909 const unsigned char *key,
39f5b069
PS
1910 const unsigned char *iv, int enc)
1911{
1912 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
39f5b069
PS
1913 int keylen;
1914
1915 if (iv == NULL && key == NULL)
1916 return 1;
1917
1918 if (key != NULL) {
1919 keylen = EVP_CIPHER_CTX_key_length(ctx);
8eb399fb 1920 cctx->aes.ccm.fc = S390X_AES_FC(keylen);
39f5b069
PS
1921 memcpy(cctx->aes.ccm.kmac_param.k, key, keylen);
1922
1923 /* Store encoded m and l. */
1924 cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7)
1925 | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3;
1926 memset(cctx->aes.ccm.nonce.b + 1, 0,
1927 sizeof(cctx->aes.ccm.nonce.b));
1928 cctx->aes.ccm.blocks = 0;
1929
1930 cctx->aes.ccm.key_set = 1;
1931 }
1932
1933 if (iv != NULL) {
9197c226 1934 memcpy(ctx->iv, iv, 15 - cctx->aes.ccm.l);
39f5b069
PS
1935
1936 cctx->aes.ccm.iv_set = 1;
1937 }
1938
1939 return 1;
1940}
1941
1942/*-
1943 * Called from EVP layer to initialize context, process additional
1944 * authenticated data, en/de-crypt plain/cipher-text and authenticate
1945 * plaintext or process a TLS packet, depending on context. Returns bytes
1946 * written on success. Otherwise -1 is returned.
1947 */
96530eea 1948static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
39f5b069
PS
1949 const unsigned char *in, size_t len)
1950{
1951 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
1952 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
1953 int rv;
9197c226 1954 unsigned char *buf;
39f5b069
PS
1955
1956 if (!cctx->aes.ccm.key_set)
1957 return -1;
1958
1959 if (cctx->aes.ccm.tls_aad_len >= 0)
1960 return s390x_aes_ccm_tls_cipher(ctx, out, in, len);
1961
1962 /*-
1963 * Final(): Does not return any data. Recall that ccm is mac-then-encrypt
1964 * so integrity must be checked already at Update() i.e., before
1965 * potentially corrupted data is output.
1966 */
1967 if (in == NULL && out != NULL)
1968 return 0;
1969
1970 if (!cctx->aes.ccm.iv_set)
1971 return -1;
1972
39f5b069
PS
1973 if (out == NULL) {
1974 /* Update(): Pass message length. */
1975 if (in == NULL) {
9197c226 1976 s390x_aes_ccm_setiv(cctx, ctx->iv, len);
39f5b069
PS
1977
1978 cctx->aes.ccm.len_set = 1;
1979 return len;
1980 }
1981
1982 /* Update(): Process aad. */
1983 if (!cctx->aes.ccm.len_set && len)
1984 return -1;
1985
1986 s390x_aes_ccm_aad(cctx, in, len);
1987 return len;
1988 }
1989
887e22dd
PS
1990 /* The tag must be set before actually decrypting data */
1991 if (!enc && !cctx->aes.ccm.tag_set)
1992 return -1;
1993
39f5b069
PS
1994 /* Update(): Process message. */
1995
1996 if (!cctx->aes.ccm.len_set) {
1997 /*-
46d08509 1998 * In case message length was not previously set explicitly via
39f5b069
PS
1999 * Update(), set it now.
2000 */
9197c226 2001 s390x_aes_ccm_setiv(cctx, ctx->iv, len);
39f5b069
PS
2002
2003 cctx->aes.ccm.len_set = 1;
2004 }
2005
2006 if (enc) {
2007 if (s390x_aes_ccm(cctx, in, out, len, enc))
2008 return -1;
2009
2010 cctx->aes.ccm.tag_set = 1;
2011 return len;
2012 } else {
2013 rv = -1;
2014
2015 if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
2016 buf = EVP_CIPHER_CTX_buf_noconst(ctx);
2017 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf,
2018 cctx->aes.ccm.m))
2019 rv = len;
2020 }
2021
2022 if (rv == -1)
2023 OPENSSL_cleanse(out, len);
2024
2025 cctx->aes.ccm.iv_set = 0;
2026 cctx->aes.ccm.tag_set = 0;
2027 cctx->aes.ccm.len_set = 0;
2028 return rv;
2029 }
2030}
2031
2032/*-
2033 * Performs various operations on the context structure depending on control
2034 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
2035 * Code is big-endian.
2036 */
2037static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2038{
2039 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c);
9197c226 2040 unsigned char *buf;
39f5b069
PS
2041 int enc, len;
2042
2043 switch (type) {
2044 case EVP_CTRL_INIT:
2045 cctx->aes.ccm.key_set = 0;
2046 cctx->aes.ccm.iv_set = 0;
2047 cctx->aes.ccm.l = 8;
2048 cctx->aes.ccm.m = 12;
2049 cctx->aes.ccm.tag_set = 0;
2050 cctx->aes.ccm.len_set = 0;
2051 cctx->aes.ccm.tls_aad_len = -1;
2052 return 1;
2053
7dddf2fc
SL
2054 case EVP_CTRL_GET_IVLEN:
2055 *(int *)ptr = 15 - cctx->aes.ccm.l;
2056 return 1;
2057
39f5b069
PS
2058 case EVP_CTRL_AEAD_TLS1_AAD:
2059 if (arg != EVP_AEAD_TLS1_AAD_LEN)
2060 return 0;
2061
2062 /* Save the aad for later use. */
2063 buf = EVP_CIPHER_CTX_buf_noconst(c);
2064 memcpy(buf, ptr, arg);
2065 cctx->aes.ccm.tls_aad_len = arg;
2066
03a5e5ae 2067 len = buf[arg - 2] << 8 | buf[arg - 1];
39f5b069
PS
2068 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
2069 return 0;
2070
2071 /* Correct length for explicit iv. */
2072 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
2073
2074 enc = EVP_CIPHER_CTX_encrypting(c);
2075 if (!enc) {
2076 if (len < cctx->aes.ccm.m)
2077 return 0;
2078
2079 /* Correct length for tag. */
2080 len -= cctx->aes.ccm.m;
2081 }
2082
03a5e5ae
PS
2083 buf[arg - 2] = len >> 8;
2084 buf[arg - 1] = len & 0xff;
2085
39f5b069
PS
2086 /* Extra padding: tag appended to record. */
2087 return cctx->aes.ccm.m;
2088
2089 case EVP_CTRL_CCM_SET_IV_FIXED:
2090 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
2091 return 0;
2092
2093 /* Copy to first part of the iv. */
9197c226 2094 memcpy(c->iv, ptr, arg);
39f5b069
PS
2095 return 1;
2096
2097 case EVP_CTRL_AEAD_SET_IVLEN:
2098 arg = 15 - arg;
2099 /* fall-through */
2100
2101 case EVP_CTRL_CCM_SET_L:
2102 if (arg < 2 || arg > 8)
2103 return 0;
2104
2105 cctx->aes.ccm.l = arg;
2106 return 1;
2107
2108 case EVP_CTRL_AEAD_SET_TAG:
2109 if ((arg & 1) || arg < 4 || arg > 16)
2110 return 0;
2111
2112 enc = EVP_CIPHER_CTX_encrypting(c);
2113 if (enc && ptr)
2114 return 0;
2115
2116 if (ptr) {
2117 cctx->aes.ccm.tag_set = 1;
2118 buf = EVP_CIPHER_CTX_buf_noconst(c);
2119 memcpy(buf, ptr, arg);
2120 }
2121
2122 cctx->aes.ccm.m = arg;
2123 return 1;
2124
2125 case EVP_CTRL_AEAD_GET_TAG:
2126 enc = EVP_CIPHER_CTX_encrypting(c);
2127 if (!enc || !cctx->aes.ccm.tag_set)
2128 return 0;
2129
2130 if (arg < cctx->aes.ccm.m)
2131 return 0;
2132
2133 memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
2134 cctx->aes.ccm.tag_set = 0;
2135 cctx->aes.ccm.iv_set = 0;
2136 cctx->aes.ccm.len_set = 0;
2137 return 1;
2138
2139 case EVP_CTRL_COPY:
2140 return 1;
2141
2142 default:
2143 return -1;
2144 }
2145}
2146
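The EVP_CTRL_AEAD_TLS1_AAD branch above receives the 13-byte TLS record header as AAD and rewrites its length field so that it describes the plaintext: the explicit IV is stripped, and on decryption the tag as well; the tag length is then reported back as the record's "padding". A standalone sketch of that fixup (the function name is illustrative):

#include <stddef.h>
#include <openssl/evp.h>

/* Sketch of the length rewrite done for EVP_CTRL_AEAD_TLS1_AAD above:
 * aad is the 13-byte TLS record header, m the CCM tag length in use. */
static int ccm_tls1_aad_fixup(unsigned char aad[EVP_AEAD_TLS1_AAD_LEN],
                              size_t m, int decrypting)
{
    unsigned int len = aad[11] << 8 | aad[12];   /* record length, big-endian */

    if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
        return 0;
    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;          /* strip explicit IV */
    if (decrypting) {
        if (len < m)
            return 0;
        len -= m;                                /* strip tag */
    }
    aad[11] = (unsigned char)(len >> 8);
    aad[12] = (unsigned char)(len & 0xff);
    return (int)m;                               /* tag is the "padding" */
}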
96530eea
PS
2147# define s390x_aes_ccm_cleanup aes_ccm_cleanup
2148
2149# ifndef OPENSSL_NO_OCB
dd6b2706 2150# define S390X_AES_OCB_CTX EVP_AES_OCB_CTX
96530eea
PS
2151
2152# define s390x_aes_ocb_init_key aes_ocb_init_key
2153static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2154 const unsigned char *iv, int enc);
2155# define s390x_aes_ocb_cipher aes_ocb_cipher
2156static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2157 const unsigned char *in, size_t len);
2158# define s390x_aes_ocb_cleanup aes_ocb_cleanup
2159static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *);
2160# define s390x_aes_ocb_ctrl aes_ocb_ctrl
2161static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
2162# endif
2163
e74be3d4
RL
2164# ifndef OPENSSL_NO_SIV
2165# define S390X_AES_SIV_CTX EVP_AES_SIV_CTX
e74be3d4
RL
2166
2167# define s390x_aes_siv_init_key aes_siv_init_key
2168# define s390x_aes_siv_cipher aes_siv_cipher
2169# define s390x_aes_siv_cleanup aes_siv_cleanup
2170# define s390x_aes_siv_ctrl aes_siv_ctrl
2171# endif
2172
dd6b2706
P
2173# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode, \
2174 MODE,flags) \
2175static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2176 nid##_##keylen##_##nmode,blocksize, \
2177 keylen / 8, \
2178 ivlen, \
2179 flags | EVP_CIPH_##MODE##_MODE, \
2180 s390x_aes_##mode##_init_key, \
2181 s390x_aes_##mode##_cipher, \
2182 NULL, \
2183 sizeof(S390X_AES_##MODE##_CTX), \
2184 NULL, \
2185 NULL, \
2186 NULL, \
2187 NULL \
2188}; \
2189static const EVP_CIPHER aes_##keylen##_##mode = { \
2190 nid##_##keylen##_##nmode, \
2191 blocksize, \
2192 keylen / 8, \
2193 ivlen, \
2194 flags | EVP_CIPH_##MODE##_MODE, \
2195 aes_init_key, \
2196 aes_##mode##_cipher, \
2197 NULL, \
2198 sizeof(EVP_AES_KEY), \
2199 NULL, \
2200 NULL, \
2201 NULL, \
2202 NULL \
2203}; \
2204const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2205{ \
2206 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2207 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
96530eea
PS
2208}
2209
2210# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\
dd6b2706
P
2211static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2212 nid##_##keylen##_##mode, \
2213 blocksize, \
2214 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
2215 ivlen, \
2216 flags | EVP_CIPH_##MODE##_MODE, \
2217 s390x_aes_##mode##_init_key, \
2218 s390x_aes_##mode##_cipher, \
2219 s390x_aes_##mode##_cleanup, \
2220 sizeof(S390X_AES_##MODE##_CTX), \
2221 NULL, \
2222 NULL, \
2223 s390x_aes_##mode##_ctrl, \
2224 NULL \
2225}; \
2226static const EVP_CIPHER aes_##keylen##_##mode = { \
2227 nid##_##keylen##_##mode,blocksize, \
2228 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
2229 ivlen, \
2230 flags | EVP_CIPH_##MODE##_MODE, \
2231 aes_##mode##_init_key, \
2232 aes_##mode##_cipher, \
2233 aes_##mode##_cleanup, \
2234 sizeof(EVP_AES_##MODE##_CTX), \
2235 NULL, \
2236 NULL, \
2237 aes_##mode##_ctrl, \
2238 NULL \
2239}; \
2240const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2241{ \
2242 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2243 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
96530eea
PS
2244}
2245
5158c763 2246#else
17f121de 2247
5158c763 2248# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
17f121de 2249static const EVP_CIPHER aes_##keylen##_##mode = { \
0f113f3e
MC
2250 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
2251 flags|EVP_CIPH_##MODE##_MODE, \
2252 aes_init_key, \
2253 aes_##mode##_cipher, \
2254 NULL, \
2255 sizeof(EVP_AES_KEY), \
2256 NULL,NULL,NULL,NULL }; \
17f121de
AP
2257const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2258{ return &aes_##keylen##_##mode; }
d1fff483 2259
5158c763 2260# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
17f121de 2261static const EVP_CIPHER aes_##keylen##_##mode = { \
0f113f3e 2262 nid##_##keylen##_##mode,blocksize, \
b1ceb439
TS
2263 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
2264 ivlen, \
0f113f3e
MC
2265 flags|EVP_CIPH_##MODE##_MODE, \
2266 aes_##mode##_init_key, \
2267 aes_##mode##_cipher, \
2268 aes_##mode##_cleanup, \
2269 sizeof(EVP_AES_##MODE##_CTX), \
2270 NULL,NULL,aes_##mode##_ctrl,NULL }; \
17f121de
AP
2271const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2272{ return &aes_##keylen##_##mode; }
9575d1a9 2273
5158c763 2274#endif
9575d1a9 2275
5158c763 2276#define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \
0f113f3e
MC
2277 BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2278 BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2279 BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2280 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2281 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags) \
2282 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags) \
2283 BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
d1fff483
AP
2284
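Each BLOCK_CIPHER_generic invocation stamps out a static EVP_CIPHER table plus the matching EVP_aes_<keylen>_<mode>() accessor. The following hand expansion of the 128-bit CBC entry of BLOCK_CIPHER_generic_pack() is illustrative only; the _sketch names are not real symbols, and aes_init_key/aes_cbc_cipher are the functions defined below:

static const EVP_CIPHER aes_128_cbc_sketch = {
    NID_aes_128_cbc,                 /* nid##_##keylen##_##nmode          */
    16,                              /* block size                        */
    16,                              /* key length: 128 / 8               */
    16,                              /* IV length                         */
    EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CBC_MODE,
    aes_init_key,                    /* common key schedule setup (below) */
    aes_cbc_cipher,                  /* mode-specific do_cipher (below)   */
    NULL,
    sizeof(EVP_AES_KEY),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_128_cbc_sketch(void)  /* real name: EVP_aes_128_cbc */
{
    return &aes_128_cbc_sketch;
}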
2285static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
2286 const unsigned char *iv, int enc)
2287{
2288 int ret, mode;
6435f0f6 2289 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
0f113f3e 2290
6435f0f6 2291 mode = EVP_CIPHER_CTX_mode(ctx);
0f113f3e 2292 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
c01a3c6d 2293 && !enc) {
5158c763 2294#ifdef HWAES_CAPABLE
0f113f3e 2295 if (HWAES_CAPABLE) {
6435f0f6
RL
2296 ret = HWAES_set_decrypt_key(key,
2297 EVP_CIPHER_CTX_key_length(ctx) * 8,
2298 &dat->ks.ks);
0f113f3e
MC
2299 dat->block = (block128_f) HWAES_decrypt;
2300 dat->stream.cbc = NULL;
5158c763 2301# ifdef HWAES_cbc_encrypt
0f113f3e
MC
2302 if (mode == EVP_CIPH_CBC_MODE)
2303 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
0f113f3e 2304# endif
5158c763
MC
2305 } else
2306#endif
2307#ifdef BSAES_CAPABLE
0f113f3e 2308 if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
6435f0f6
RL
2309 ret = AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2310 &dat->ks.ks);
0f113f3e
MC
2311 dat->block = (block128_f) AES_decrypt;
2312 dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
2313 } else
5158c763
MC
2314#endif
2315#ifdef VPAES_CAPABLE
0f113f3e 2316 if (VPAES_CAPABLE) {
6435f0f6
RL
2317 ret = vpaes_set_decrypt_key(key,
2318 EVP_CIPHER_CTX_key_length(ctx) * 8,
2319 &dat->ks.ks);
0f113f3e
MC
2320 dat->block = (block128_f) vpaes_decrypt;
2321 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2322 (cbc128_f) vpaes_cbc_encrypt : NULL;
2323 } else
5158c763 2324#endif
0f113f3e 2325 {
6435f0f6
RL
2326 ret = AES_set_decrypt_key(key,
2327 EVP_CIPHER_CTX_key_length(ctx) * 8,
2328 &dat->ks.ks);
0f113f3e
MC
2329 dat->block = (block128_f) AES_decrypt;
2330 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2331 (cbc128_f) AES_cbc_encrypt : NULL;
c01a3c6d 2332 }
0f113f3e 2333 } else
5158c763 2334#ifdef HWAES_CAPABLE
0f113f3e 2335 if (HWAES_CAPABLE) {
6435f0f6
RL
2336 ret = HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2337 &dat->ks.ks);
0f113f3e
MC
2338 dat->block = (block128_f) HWAES_encrypt;
2339 dat->stream.cbc = NULL;
5158c763 2340# ifdef HWAES_cbc_encrypt
0f113f3e
MC
2341 if (mode == EVP_CIPH_CBC_MODE)
2342 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2343 else
5158c763
MC
2344# endif
2345# ifdef HWAES_ctr32_encrypt_blocks
0f113f3e
MC
2346 if (mode == EVP_CIPH_CTR_MODE)
2347 dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
2348 else
5158c763 2349# endif
0f113f3e
MC
2350 (void)0; /* terminate potentially open 'else' */
2351 } else
5158c763
MC
2352#endif
2353#ifdef BSAES_CAPABLE
0f113f3e 2354 if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
6435f0f6
RL
2355 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2356 &dat->ks.ks);
0f113f3e
MC
2357 dat->block = (block128_f) AES_encrypt;
2358 dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
2359 } else
5158c763
MC
2360#endif
2361#ifdef VPAES_CAPABLE
0f113f3e 2362 if (VPAES_CAPABLE) {
6435f0f6
RL
2363 ret = vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2364 &dat->ks.ks);
0f113f3e
MC
2365 dat->block = (block128_f) vpaes_encrypt;
2366 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2367 (cbc128_f) vpaes_cbc_encrypt : NULL;
2368 } else
5158c763 2369#endif
0f113f3e 2370 {
6435f0f6
RL
2371 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2372 &dat->ks.ks);
0f113f3e
MC
2373 dat->block = (block128_f) AES_encrypt;
2374 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2375 (cbc128_f) AES_cbc_encrypt : NULL;
5158c763 2376#ifdef AES_CTR_ASM
0f113f3e
MC
2377 if (mode == EVP_CIPH_CTR_MODE)
2378 dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
5158c763 2379#endif
0f113f3e 2380 }
d1fff483 2381
0f113f3e 2382 if (ret < 0) {
9311d0c4 2383 ERR_raise(ERR_LIB_EVP, EVP_R_AES_KEY_SETUP_FAILED);
0f113f3e
MC
2384 return 0;
2385 }
d1fff483 2386
0f113f3e
MC
2387 return 1;
2388}
d1fff483 2389
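aes_init_key() is the shared key-schedule setup behind all of the generic (non-AEAD) modes, and callers only ever reach it through the public EVP interface. A minimal encrypt-side sketch, with error handling reduced to a single flag; the output buffer needs room for ptlen plus one block of padding:

#include <openssl/evp.h>

/* Sketch: CBC-encrypt one buffer with a 128-bit key and 16-byte IV.
 * Returns the number of ciphertext bytes written, or -1 on failure. */
static int cbc_encrypt_sketch(const unsigned char key[16],
                              const unsigned char iv[16],
                              const unsigned char *pt, int ptlen,
                              unsigned char *ct)
{
    EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
    int outl = 0, tmplen = 0, ok;

    if (ctx == NULL)
        return -1;
    ok = EVP_EncryptInit_ex(ctx, EVP_aes_128_cbc(), NULL, key, iv)
         && EVP_EncryptUpdate(ctx, ct, &outl, pt, ptlen)
         && EVP_EncryptFinal_ex(ctx, ct + outl, &tmplen);
    EVP_CIPHER_CTX_free(ctx);
    return ok ? outl + tmplen : -1;
}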
0f113f3e
MC
2390static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2391 const unsigned char *in, size_t len)
17f121de 2392{
6435f0f6 2393 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
8ca28da0 2394
0f113f3e 2395 if (dat->stream.cbc)
9197c226 2396 (*dat->stream.cbc) (in, out, len, &dat->ks, ctx->iv,
6435f0f6
RL
2397 EVP_CIPHER_CTX_encrypting(ctx));
2398 else if (EVP_CIPHER_CTX_encrypting(ctx))
9197c226
BK
2399 CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, ctx->iv,
2400 dat->block);
0f113f3e 2401 else
6435f0f6 2402 CRYPTO_cbc128_decrypt(in, out, len, &dat->ks,
9197c226 2403 ctx->iv, dat->block);
17f121de 2404
0f113f3e 2405 return 1;
17f121de
AP
2406}
2407
0f113f3e
MC
2408static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2409 const unsigned char *in, size_t len)
17f121de 2410{
6435f0f6 2411 size_t bl = EVP_CIPHER_CTX_block_size(ctx);
0f113f3e 2412 size_t i;
6435f0f6 2413 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
17f121de 2414
0f113f3e
MC
2415 if (len < bl)
2416 return 1;
17f121de 2417
0f113f3e
MC
2418 for (i = 0, len -= bl; i <= len; i += bl)
2419 (*dat->block) (in + i, out + i, &dat->ks);
17f121de 2420
0f113f3e 2421 return 1;
17f121de 2422}
deb2c1a1 2423
0f113f3e
MC
2424static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2425 const unsigned char *in, size_t len)
17f121de 2426{
6435f0f6 2427 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
8ca28da0 2428
6435f0f6 2429 int num = EVP_CIPHER_CTX_num(ctx);
0f113f3e 2430 CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
9197c226 2431 ctx->iv, &num, dat->block);
6435f0f6 2432 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e 2433 return 1;
17f121de 2434}
deb2c1a1 2435
0f113f3e
MC
2436static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2437 const unsigned char *in, size_t len)
17f121de 2438{
6435f0f6 2439 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
8ca28da0 2440
6435f0f6 2441 int num = EVP_CIPHER_CTX_num(ctx);
0f113f3e 2442 CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
9197c226 2443 ctx->iv, &num,
6435f0f6
RL
2444 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2445 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e 2446 return 1;
17f121de
AP
2447}
2448
0f113f3e
MC
2449static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2450 const unsigned char *in, size_t len)
17f121de 2451{
6435f0f6 2452 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
8ca28da0 2453
6435f0f6 2454 int num = EVP_CIPHER_CTX_num(ctx);
0f113f3e 2455 CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
9197c226 2456 ctx->iv, &num,
6435f0f6
RL
2457 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2458 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e 2459 return 1;
17f121de 2460}
8d1ebe0b 2461
0f113f3e
MC
2462static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2463 const unsigned char *in, size_t len)
17f121de 2464{
6435f0f6 2465 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
0f113f3e 2466
6435f0f6
RL
2467 if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
2468 int num = EVP_CIPHER_CTX_num(ctx);
0f113f3e 2469 CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
9197c226 2470 ctx->iv, &num,
6435f0f6
RL
2471 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2472 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e
MC
2473 return 1;
2474 }
2475
2476 while (len >= MAXBITCHUNK) {
6435f0f6 2477 int num = EVP_CIPHER_CTX_num(ctx);
0f113f3e 2478 CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
9197c226 2479 ctx->iv, &num,
6435f0f6
RL
2480 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2481 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e 2482 len -= MAXBITCHUNK;
604e591e
BE
2483 out += MAXBITCHUNK;
2484 in += MAXBITCHUNK;
0f113f3e 2485 }
6435f0f6
RL
2486 if (len) {
2487 int num = EVP_CIPHER_CTX_num(ctx);
0f113f3e 2488 CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
9197c226 2489 ctx->iv, &num,
6435f0f6
RL
2490 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2491 EVP_CIPHER_CTX_set_num(ctx, num);
2492 }
0f113f3e
MC
2493
2494 return 1;
17f121de 2495}
8d1ebe0b 2496
0f113f3e
MC
2497static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2498 const unsigned char *in, size_t len)
d976f992 2499{
6435f0f6
RL
2500 unsigned int num = EVP_CIPHER_CTX_num(ctx);
2501 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
0f113f3e
MC
2502
2503 if (dat->stream.ctr)
2504 CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
9197c226 2505 ctx->iv,
6435f0f6
RL
2506 EVP_CIPHER_CTX_buf_noconst(ctx),
2507 &num, dat->stream.ctr);
0f113f3e
MC
2508 else
2509 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
9197c226 2510 ctx->iv,
6435f0f6
RL
2511 EVP_CIPHER_CTX_buf_noconst(ctx), &num,
2512 dat->block);
2513 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e 2514 return 1;
d976f992
AP
2515}
2516
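aes_ctr_cipher() keeps the counter block in ctx->iv, the partially consumed keystream block in the context buffer, and the offset into it in num. The same bookkeeping can be reproduced against the low-level modes API it falls back to when no ctr32 assembler routine is available; a sketch only, since these low-level calls are deprecated for applications in 3.0:

#include <string.h>
#include <openssl/aes.h>
#include <openssl/modes.h>

/* Sketch: drive AES-128-CTR through CRYPTO_ctr128_encrypt() the way
 * aes_ctr_cipher() does when no stream function is set. */
static void ctr_sketch(const unsigned char key[16],
                       const unsigned char iv[16],
                       const unsigned char *in, unsigned char *out, size_t len)
{
    AES_KEY ks;
    unsigned char ctr[16], ecount[16] = { 0 };
    unsigned int num = 0;        /* offset into the current keystream block */

    AES_set_encrypt_key(key, 128, &ks);   /* return value ignored for brevity */
    memcpy(ctr, iv, 16);                  /* counter block is carried in the IV */
    CRYPTO_ctr128_encrypt(in, out, len, &ks, ctr, ecount, &num,
                          (block128_f)AES_encrypt);
}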
0f113f3e
MC
2517BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
2518 BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
2519 BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
bdaa5415
DSH
2520
2521static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
0f113f3e 2522{
6435f0f6 2523 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
273a0218
BE
2524 if (gctx == NULL)
2525 return 0;
0f113f3e 2526 OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
9197c226 2527 if (gctx->iv != c->iv)
0f113f3e
MC
2528 OPENSSL_free(gctx->iv);
2529 return 1;
2530}
bdaa5415
DSH
2531
2532static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
0f113f3e 2533{
6435f0f6 2534 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
0f113f3e
MC
2535 switch (type) {
2536 case EVP_CTRL_INIT:
2537 gctx->key_set = 0;
2538 gctx->iv_set = 0;
7dddf2fc 2539 gctx->ivlen = EVP_CIPHER_iv_length(c->cipher);
c5307d9c 2540 gctx->iv = c->iv;
0f113f3e
MC
2541 gctx->taglen = -1;
2542 gctx->iv_gen = 0;
2543 gctx->tls_aad_len = -1;
2544 return 1;
2545
7dddf2fc
SL
2546 case EVP_CTRL_GET_IVLEN:
2547 *(int *)ptr = gctx->ivlen;
2548 return 1;
2549
e640fa02 2550 case EVP_CTRL_AEAD_SET_IVLEN:
0f113f3e
MC
2551 if (arg <= 0)
2552 return 0;
2553 /* Allocate memory for IV if needed */
2554 if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
c5307d9c 2555 if (gctx->iv != c->iv)
0f113f3e 2556 OPENSSL_free(gctx->iv);
cdb10bae 2557 if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
9311d0c4 2558 ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
0f113f3e 2559 return 0;
cdb10bae 2560 }
0f113f3e
MC
2561 }
2562 gctx->ivlen = arg;
2563 return 1;
2564
e640fa02 2565 case EVP_CTRL_AEAD_SET_TAG:
c5307d9c 2566 if (arg <= 0 || arg > 16 || c->encrypt)
0f113f3e 2567 return 0;
c5307d9c 2568 memcpy(c->buf, ptr, arg);
0f113f3e
MC
2569 gctx->taglen = arg;
2570 return 1;
2571
e640fa02 2572 case EVP_CTRL_AEAD_GET_TAG:
c5307d9c 2573 if (arg <= 0 || arg > 16 || !c->encrypt
6435f0f6 2574 || gctx->taglen < 0)
0f113f3e 2575 return 0;
c5307d9c 2576 memcpy(ptr, c->buf, arg);
0f113f3e
MC
2577 return 1;
2578
2579 case EVP_CTRL_GCM_SET_IV_FIXED:
2580 /* Special case: -1 length restores whole IV */
2581 if (arg == -1) {
2582 memcpy(gctx->iv, ptr, gctx->ivlen);
2583 gctx->iv_gen = 1;
2584 return 1;
2585 }
2586 /*
2587 * Fixed field must be at least 4 bytes and invocation field at least
2588 * 8.
2589 */
2590 if ((arg < 4) || (gctx->ivlen - arg) < 8)
2591 return 0;
2592 if (arg)
2593 memcpy(gctx->iv, ptr, arg);
c5307d9c 2594 if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
16cfc2c9 2595 return 0;
0f113f3e
MC
2596 gctx->iv_gen = 1;
2597 return 1;
2598
2599 case EVP_CTRL_GCM_IV_GEN:
2600 if (gctx->iv_gen == 0 || gctx->key_set == 0)
2601 return 0;
2602 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2603 if (arg <= 0 || arg > gctx->ivlen)
2604 arg = gctx->ivlen;
2605 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
2606 /*
2607 * Invocation field will be at least 8 bytes in size and so no need
2608 * to check wrap around or increment more than last 8 bytes.
2609 */
2610 ctr64_inc(gctx->iv + gctx->ivlen - 8);
2611 gctx->iv_set = 1;
2612 return 1;
2613
2614 case EVP_CTRL_GCM_SET_IV_INV:
c5307d9c 2615 if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
0f113f3e
MC
2616 return 0;
2617 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
2618 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2619 gctx->iv_set = 1;
2620 return 1;
2621
2622 case EVP_CTRL_AEAD_TLS1_AAD:
2623 /* Save the AAD for later use */
c8269881 2624 if (arg != EVP_AEAD_TLS1_AAD_LEN)
0f113f3e 2625 return 0;
c5307d9c 2626 memcpy(c->buf, ptr, arg);
0f113f3e 2627 gctx->tls_aad_len = arg;
d6b34570 2628 gctx->tls_enc_records = 0;
0f113f3e 2629 {
c5307d9c 2630 unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
0f113f3e 2631 /* Correct length for explicit IV */
2198b3a5
AP
2632 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
2633 return 0;
0f113f3e
MC
2634 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
2635 /* If decrypting correct for tag too */
c5307d9c 2636 if (!c->encrypt) {
2198b3a5
AP
2637 if (len < EVP_GCM_TLS_TAG_LEN)
2638 return 0;
0f113f3e 2639 len -= EVP_GCM_TLS_TAG_LEN;
2198b3a5 2640 }
c5307d9c
AP
2641 c->buf[arg - 2] = len >> 8;
2642 c->buf[arg - 1] = len & 0xff;
0f113f3e
MC
2643 }
2644 /* Extra padding: tag appended to record */
2645 return EVP_GCM_TLS_TAG_LEN;
2646
2647 case EVP_CTRL_COPY:
2648 {
2649 EVP_CIPHER_CTX *out = ptr;
6435f0f6 2650 EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out);
0f113f3e
MC
2651 if (gctx->gcm.key) {
2652 if (gctx->gcm.key != &gctx->ks)
2653 return 0;
2654 gctx_out->gcm.key = &gctx_out->ks;
2655 }
c5307d9c
AP
2656 if (gctx->iv == c->iv)
2657 gctx_out->iv = out->iv;
0f113f3e 2658 else {
cdb10bae 2659 if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
9311d0c4 2660 ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
0f113f3e 2661 return 0;
cdb10bae 2662 }
0f113f3e
MC
2663 memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
2664 }
2665 return 1;
2666 }
2667
2668 default:
2669 return -1;
2670
2671 }
2672}
bdaa5415
DSH
2673
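The EVP_CTRL_GCM_SET_IV_FIXED / EVP_CTRL_GCM_IV_GEN pair above implements the TLS style of nonce construction: a fixed salt from the key block plus an 8-byte invocation field that is incremented per record. A hedged sketch of the calling sequence; the context is assumed to have been keyed already, since IV_GEN requires key_set:

#include <openssl/evp.h>

/* Sketch: install a 4-byte salt once, then have the context generate and
 * return the 8-byte explicit part for each record. */
static int gcm_tls_iv_sketch(EVP_CIPHER_CTX *ctx,
                             const unsigned char salt[4],
                             unsigned char explicit_iv[8])
{
    if (!EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_GCM_SET_IV_FIXED, 4, (void *)salt))
        return 0;
    /* Per record: increments the invocation field and copies it out. */
    return EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_GCM_IV_GEN, 8, explicit_iv);
}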
2674static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
2675 const unsigned char *iv, int enc)
2676{
6435f0f6 2677 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
0f113f3e
MC
2678 if (!iv && !key)
2679 return 1;
2680 if (key) {
2681 do {
5158c763 2682#ifdef HWAES_CAPABLE
0f113f3e 2683 if (HWAES_CAPABLE) {
c5307d9c 2684 HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
0f113f3e
MC
2685 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2686 (block128_f) HWAES_encrypt);
5158c763 2687# ifdef HWAES_ctr32_encrypt_blocks
0f113f3e 2688 gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
5158c763 2689# else
0f113f3e 2690 gctx->ctr = NULL;
5158c763 2691# endif
0f113f3e
MC
2692 break;
2693 } else
5158c763
MC
2694#endif
2695#ifdef BSAES_CAPABLE
0f113f3e 2696 if (BSAES_CAPABLE) {
c5307d9c 2697 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
0f113f3e
MC
2698 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2699 (block128_f) AES_encrypt);
2700 gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
2701 break;
2702 } else
5158c763
MC
2703#endif
2704#ifdef VPAES_CAPABLE
0f113f3e 2705 if (VPAES_CAPABLE) {
c5307d9c 2706 vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
0f113f3e
MC
2707 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2708 (block128_f) vpaes_encrypt);
2709 gctx->ctr = NULL;
2710 break;
2711 } else
5158c763 2712#endif
0f113f3e
MC
2713 (void)0; /* terminate potentially open 'else' */
2714
c5307d9c 2715 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
0f113f3e
MC
2716 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2717 (block128_f) AES_encrypt);
5158c763 2718#ifdef AES_CTR_ASM
0f113f3e 2719 gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
5158c763 2720#else
0f113f3e 2721 gctx->ctr = NULL;
5158c763 2722#endif
0f113f3e
MC
2723 } while (0);
2724
2725 /*
2726 * If we have an iv we can set it directly, otherwise use the saved IV.
2727 */
2728 if (iv == NULL && gctx->iv_set)
2729 iv = gctx->iv;
2730 if (iv) {
2731 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
2732 gctx->iv_set = 1;
2733 }
2734 gctx->key_set = 1;
2735 } else {
2736 /* If key set use IV, otherwise copy */
2737 if (gctx->key_set)
2738 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
2739 else
2740 memcpy(gctx->iv, iv, gctx->ivlen);
2741 gctx->iv_set = 1;
2742 gctx->iv_gen = 0;
2743 }
2744 return 1;
2745}
2746
2747/*
2748 * Handle TLS GCM packet format. This consists of the last portion of the IV
28dd49fa
DSH
2749 * followed by the payload and finally the tag. On encrypt generate IV,
2750 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload
2751 * and verify tag.
2752 */
2753
2754static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e
MC
2755 const unsigned char *in, size_t len)
2756{
6435f0f6 2757 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
0f113f3e
MC
2758 int rv = -1;
2759 /* Encrypt/decrypt must be performed in place */
2760 if (out != in
2761 || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
2762 return -1;
df443918 2763
d6b34570
P
2764 /*
2765 * Check for too many records as per FIPS 140-2 IG A.5 "Key/IV Pair
2766 * Uniqueness Requirements from SP 800-38D". The requirement is for one
2767 * party to the communication to fail after 2^64 - 1 records are encrypted
2768 * under a single key. We do this on the encrypting side only.
2769 */
2770 if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
9311d0c4 2771 ERR_raise(ERR_LIB_EVP, EVP_R_TOO_MANY_RECORDS);
d6b34570
P
2772 goto err;
2773 }
2774
0f113f3e
MC
2775 /*
2776 * Set IV from start of buffer or generate IV and write to start of
2777 * buffer.
2778 */
c5307d9c
AP
2779 if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ? EVP_CTRL_GCM_IV_GEN
2780 : EVP_CTRL_GCM_SET_IV_INV,
0f113f3e
MC
2781 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
2782 goto err;
2783 /* Use saved AAD */
c5307d9c 2784 if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
0f113f3e
MC
2785 goto err;
2786 /* Fix buffer and length to point to payload */
2787 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
2788 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
2789 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
c5307d9c 2790 if (ctx->encrypt) {
0f113f3e
MC
2791 /* Encrypt payload */
2792 if (gctx->ctr) {
2793 size_t bulk = 0;
5158c763 2794#if defined(AES_GCM_ASM)
0f113f3e
MC
2795 if (len >= 32 && AES_GCM_ASM(gctx)) {
2796 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
2797 return -1;
2798
2799 bulk = AES_gcm_encrypt(in, out, len,
2800 gctx->gcm.key,
2801 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2802 gctx->gcm.len.u[1] += bulk;
2803 }
5158c763 2804#endif
0f113f3e
MC
2805 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
2806 in + bulk,
2807 out + bulk,
2808 len - bulk, gctx->ctr))
2809 goto err;
2810 } else {
2811 size_t bulk = 0;
5158c763 2812#if defined(AES_GCM_ASM2)
0f113f3e
MC
2813 if (len >= 32 && AES_GCM_ASM2(gctx)) {
2814 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
2815 return -1;
2816
2817 bulk = AES_gcm_encrypt(in, out, len,
2818 gctx->gcm.key,
2819 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2820 gctx->gcm.len.u[1] += bulk;
2821 }
5158c763 2822#endif
0f113f3e
MC
2823 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
2824 in + bulk, out + bulk, len - bulk))
2825 goto err;
2826 }
2827 out += len;
2828 /* Finally write tag */
2829 CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
2830 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
2831 } else {
2832 /* Decrypt */
2833 if (gctx->ctr) {
2834 size_t bulk = 0;
5158c763 2835#if defined(AES_GCM_ASM)
0f113f3e
MC
2836 if (len >= 16 && AES_GCM_ASM(gctx)) {
2837 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
2838 return -1;
2839
2840 bulk = AES_gcm_decrypt(in, out, len,
2841 gctx->gcm.key,
2842 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2843 gctx->gcm.len.u[1] += bulk;
2844 }
5158c763 2845#endif
0f113f3e
MC
2846 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
2847 in + bulk,
2848 out + bulk,
2849 len - bulk, gctx->ctr))
2850 goto err;
2851 } else {
2852 size_t bulk = 0;
5158c763 2853#if defined(AES_GCM_ASM2)
0f113f3e
MC
2854 if (len >= 16 && AES_GCM_ASM2(gctx)) {
2855 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
2856 return -1;
2857
2858 bulk = AES_gcm_decrypt(in, out, len,
2859 gctx->gcm.key,
2860 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2861 gctx->gcm.len.u[1] += bulk;
2862 }
5158c763 2863#endif
0f113f3e
MC
2864 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
2865 in + bulk, out + bulk, len - bulk))
2866 goto err;
2867 }
2868 /* Retrieve tag */
c5307d9c 2869 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
0f113f3e 2870 /* If tag mismatch wipe buffer */
c5307d9c 2871 if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
0f113f3e
MC
2872 OPENSSL_cleanse(out, len);
2873 goto err;
2874 }
2875 rv = len;
2876 }
2877
2878 err:
2879 gctx->iv_set = 0;
2880 gctx->tls_aad_len = -1;
2881 return rv;
2882}
28dd49fa 2883
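The length arithmetic above is fixed for TLS 1.2 AES-GCM: every record carries an 8-byte explicit IV in front of the payload and a 16-byte tag after it. As a one-line sketch using the same EVP constants:

#include <stddef.h>
#include <openssl/evp.h>

/* Sketch: ciphertext size of a TLS1.2 AES-GCM record for a given payload. */
static size_t gcm_tls_record_len(size_t payload)
{
    return EVP_GCM_TLS_EXPLICIT_IV_LEN + payload + EVP_GCM_TLS_TAG_LEN;
}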
f844f9eb 2884#ifdef FIPS_MODULE
bcf082d1
SL
2885/*
2886 * See SP800-38D (GCM) Section 8 "Uniqueness requirement on IVS and keys"
2887 *
2888 * See also 8.2.2 RBG-based construction.
2889 * Random construction consists of a free field (which can be NULL) and a
2890 * random field which will use a DRBG that can return at least 96 bits of
2891 * entropy strength. (The DRBG must be seeded by the FIPS module).
2892 */
2893static int aes_gcm_iv_generate(EVP_AES_GCM_CTX *gctx, int offset)
2894{
2895 int sz = gctx->ivlen - offset;
2896
2897 /* Must be at least 96 bits */
2898 if (sz <= 0 || gctx->ivlen < 12)
2899 return 0;
2900
2901 /* Use DRBG to generate random iv */
2902 if (RAND_bytes(gctx->iv + offset, sz) <= 0)
2903 return 0;
2904 return 1;
2905}
f844f9eb 2906#endif /* FIPS_MODULE */
bcf082d1 2907
17f121de 2908static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e
MC
2909 const unsigned char *in, size_t len)
2910{
6435f0f6 2911 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
bcf082d1 2912
0f113f3e
MC
2913 /* If not set up, return error */
2914 if (!gctx->key_set)
2915 return -1;
2916
2917 if (gctx->tls_aad_len >= 0)
2918 return aes_gcm_tls_cipher(ctx, out, in, len);
2919
f844f9eb 2920#ifdef FIPS_MODULE
bcf082d1
SL
2921 /*
2922 * FIPS requires generation of AES-GCM IV's inside the FIPS module.
2923 * The IV can still be set externally (the security policy will state that
2924 * this is not FIPS compliant). There are some applications
2925 * where setting the IV externally is the only option available.
2926 */
2927 if (!gctx->iv_set) {
2928 if (!ctx->encrypt || !aes_gcm_iv_generate(gctx, 0))
2929 return -1;
2930 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2931 gctx->iv_set = 1;
2932 gctx->iv_gen_rand = 1;
2933 }
2934#else
0f113f3e
MC
2935 if (!gctx->iv_set)
2936 return -1;
f844f9eb 2937#endif /* FIPS_MODULE */
bcf082d1 2938
0f113f3e
MC
2939 if (in) {
2940 if (out == NULL) {
2941 if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
2942 return -1;
c5307d9c 2943 } else if (ctx->encrypt) {
0f113f3e
MC
2944 if (gctx->ctr) {
2945 size_t bulk = 0;
5158c763 2946#if defined(AES_GCM_ASM)
0f113f3e
MC
2947 if (len >= 32 && AES_GCM_ASM(gctx)) {
2948 size_t res = (16 - gctx->gcm.mres) % 16;
2949
2950 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
2951 return -1;
2952
2953 bulk = AES_gcm_encrypt(in + res,
2954 out + res, len - res,
2955 gctx->gcm.key, gctx->gcm.Yi.c,
2956 gctx->gcm.Xi.u);
2957 gctx->gcm.len.u[1] += bulk;
2958 bulk += res;
2959 }
5158c763 2960#endif
0f113f3e
MC
2961 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
2962 in + bulk,
2963 out + bulk,
2964 len - bulk, gctx->ctr))
2965 return -1;
2966 } else {
2967 size_t bulk = 0;
5158c763 2968#if defined(AES_GCM_ASM2)
0f113f3e
MC
2969 if (len >= 32 && AES_GCM_ASM2(gctx)) {
2970 size_t res = (16 - gctx->gcm.mres) % 16;
2971
2972 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
2973 return -1;
2974
2975 bulk = AES_gcm_encrypt(in + res,
2976 out + res, len - res,
2977 gctx->gcm.key, gctx->gcm.Yi.c,
2978 gctx->gcm.Xi.u);
2979 gctx->gcm.len.u[1] += bulk;
2980 bulk += res;
2981 }
5158c763 2982#endif
0f113f3e
MC
2983 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
2984 in + bulk, out + bulk, len - bulk))
2985 return -1;
2986 }
2987 } else {
2988 if (gctx->ctr) {
2989 size_t bulk = 0;
5158c763 2990#if defined(AES_GCM_ASM)
0f113f3e
MC
2991 if (len >= 16 && AES_GCM_ASM(gctx)) {
2992 size_t res = (16 - gctx->gcm.mres) % 16;
2993
2994 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
2995 return -1;
2996
2997 bulk = AES_gcm_decrypt(in + res,
2998 out + res, len - res,
2999 gctx->gcm.key,
3000 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3001 gctx->gcm.len.u[1] += bulk;
3002 bulk += res;
3003 }
5158c763 3004#endif
0f113f3e
MC
3005 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
3006 in + bulk,
3007 out + bulk,
3008 len - bulk, gctx->ctr))
3009 return -1;
3010 } else {
3011 size_t bulk = 0;
5158c763 3012#if defined(AES_GCM_ASM2)
0f113f3e
MC
3013 if (len >= 16 && AES_GCM_ASM2(gctx)) {
3014 size_t res = (16 - gctx->gcm.mres) % 16;
3015
3016 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3017 return -1;
3018
3019 bulk = AES_gcm_decrypt(in + res,
3020 out + res, len - res,
3021 gctx->gcm.key,
3022 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3023 gctx->gcm.len.u[1] += bulk;
3024 bulk += res;
3025 }
5158c763 3026#endif
0f113f3e
MC
3027 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
3028 in + bulk, out + bulk, len - bulk))
3029 return -1;
3030 }
3031 }
3032 return len;
3033 } else {
c5307d9c 3034 if (!ctx->encrypt) {
0f113f3e
MC
3035 if (gctx->taglen < 0)
3036 return -1;
c5307d9c 3037 if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
0f113f3e
MC
3038 return -1;
3039 gctx->iv_set = 0;
3040 return 0;
3041 }
c5307d9c 3042 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
0f113f3e
MC
3043 gctx->taglen = 16;
3044 /* Don't reuse the IV */
3045 gctx->iv_set = 0;
3046 return 0;
3047 }
3048
3049}
3050
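Outside the TLS path, aes_gcm_cipher() is driven through the ordinary EVP AEAD sequence: AAD is supplied with a NULL output pointer, and the tag is fetched (encrypt) or installed (decrypt) through the AEAD ctrls. A minimal encrypt-side sketch assuming the default 12-byte IV:

#include <openssl/evp.h>

/* Sketch: AES-256-GCM encrypt with AAD; writes ptlen ciphertext bytes and a
 * 16-byte tag. Error handling reduced to a single flag. */
static int gcm_encrypt_sketch(const unsigned char key[32],
                              const unsigned char iv[12],
                              const unsigned char *aad, int aadlen,
                              const unsigned char *pt, int ptlen,
                              unsigned char *ct, unsigned char tag[16])
{
    EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
    int outl, ok;

    if (ctx == NULL)
        return 0;
    ok = EVP_EncryptInit_ex(ctx, EVP_aes_256_gcm(), NULL, key, iv)
         && EVP_EncryptUpdate(ctx, NULL, &outl, aad, aadlen)   /* AAD only */
         && EVP_EncryptUpdate(ctx, ct, &outl, pt, ptlen)
         && EVP_EncryptFinal_ex(ctx, ct + outl, &outl)
         && EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG, 16, tag);
    EVP_CIPHER_CTX_free(ctx);
    return ok;
}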
5158c763 3051#define CUSTOM_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
0f113f3e
MC
3052 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3053 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
7dddf2fc 3054 | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_CUSTOM_IV_LENGTH)
0f113f3e
MC
3055
3056BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
3057 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3058 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
3059 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3060 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
3061 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
32a2d8dd
DSH
3062
3063static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
0f113f3e 3064{
2c840201
P
3065 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, c);
3066
0f113f3e
MC
3067 if (type == EVP_CTRL_COPY) {
3068 EVP_CIPHER_CTX *out = ptr;
6435f0f6 3069 EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out);
2c840201 3070
0f113f3e
MC
3071 if (xctx->xts.key1) {
3072 if (xctx->xts.key1 != &xctx->ks1)
3073 return 0;
3074 xctx_out->xts.key1 = &xctx_out->ks1;
3075 }
3076 if (xctx->xts.key2) {
3077 if (xctx->xts.key2 != &xctx->ks2)
3078 return 0;
3079 xctx_out->xts.key2 = &xctx_out->ks2;
3080 }
3081 return 1;
3082 } else if (type != EVP_CTRL_INIT)
3083 return -1;
3084 /* key1 and key2 are used as an indicator that both key and IV are set */
3085 xctx->xts.key1 = NULL;
3086 xctx->xts.key2 = NULL;
3087 return 1;
3088}
32a2d8dd
DSH
3089
3090static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
3091 const unsigned char *iv, int enc)
3092{
6435f0f6 3093 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
2c840201 3094
0f113f3e
MC
3095 if (!iv && !key)
3096 return 1;
3097
3538b0f7 3098 if (key) {
0f113f3e 3099 do {
3538b0f7
P
3100 /* The key is two half length keys in reality */
3101 const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
3102 const int bits = bytes * 8;
3103
3104 /*
3105 * Verify that the two keys are different.
3106 *
3107 * This addresses the vulnerability described in Rogaway's
3108 * September 2004 paper:
3109 *
3110 * "Efficient Instantiations of Tweakable Blockciphers and
3111 * Refinements to Modes OCB and PMAC".
3112 * (http://web.cs.ucdavis.edu/~rogaway/papers/offsets.pdf)
3113 *
3114 * FIPS 140-2 IG A.9 XTS-AES Key Generation Requirements states
3115 * that:
3116 * "The check for Key_1 != Key_2 shall be done at any place
3117 * BEFORE using the keys in the XTS-AES algorithm to process
3118 * data with them."
3119 */
2c840201
P
3120 if ((!allow_insecure_decrypt || enc)
3121 && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
9311d0c4 3122 ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DUPLICATED_KEYS);
3538b0f7
P
3123 return 0;
3124 }
3125
5158c763 3126#ifdef AES_XTS_ASM
0f113f3e 3127 xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
5158c763 3128#else
0f113f3e 3129 xctx->stream = NULL;
5158c763 3130#endif
0f113f3e 3131 /* key_len is two AES keys */
5158c763 3132#ifdef HWAES_CAPABLE
0f113f3e
MC
3133 if (HWAES_CAPABLE) {
3134 if (enc) {
3538b0f7 3135 HWAES_set_encrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e 3136 xctx->xts.block1 = (block128_f) HWAES_encrypt;
46f047d7
AP
3137# ifdef HWAES_xts_encrypt
3138 xctx->stream = HWAES_xts_encrypt;
3139# endif
0f113f3e 3140 } else {
3538b0f7 3141 HWAES_set_decrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e 3142 xctx->xts.block1 = (block128_f) HWAES_decrypt;
46f047d7
AP
3143# ifdef HWAES_xts_decrypt
3144 xctx->stream = HWAES_xts_decrypt;
3145# endif
0f113f3e
MC
3146 }
3147
3538b0f7 3148 HWAES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
0f113f3e
MC
3149 xctx->xts.block2 = (block128_f) HWAES_encrypt;
3150
3151 xctx->xts.key1 = &xctx->ks1;
3152 break;
3153 } else
5158c763
MC
3154#endif
3155#ifdef BSAES_CAPABLE
0f113f3e
MC
3156 if (BSAES_CAPABLE)
3157 xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
3158 else
5158c763
MC
3159#endif
3160#ifdef VPAES_CAPABLE
0f113f3e
MC
3161 if (VPAES_CAPABLE) {
3162 if (enc) {
3538b0f7 3163 vpaes_set_encrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e
MC
3164 xctx->xts.block1 = (block128_f) vpaes_encrypt;
3165 } else {
3538b0f7 3166 vpaes_set_decrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e
MC
3167 xctx->xts.block1 = (block128_f) vpaes_decrypt;
3168 }
3169
3538b0f7 3170 vpaes_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
0f113f3e
MC
3171 xctx->xts.block2 = (block128_f) vpaes_encrypt;
3172
3173 xctx->xts.key1 = &xctx->ks1;
3174 break;
3175 } else
5158c763 3176#endif
0f113f3e
MC
3177 (void)0; /* terminate potentially open 'else' */
3178
3179 if (enc) {
3538b0f7 3180 AES_set_encrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e
MC
3181 xctx->xts.block1 = (block128_f) AES_encrypt;
3182 } else {
3538b0f7 3183 AES_set_decrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e
MC
3184 xctx->xts.block1 = (block128_f) AES_decrypt;
3185 }
3186
3538b0f7 3187 AES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
0f113f3e
MC
3188 xctx->xts.block2 = (block128_f) AES_encrypt;
3189
3190 xctx->xts.key1 = &xctx->ks1;
3191 } while (0);
3538b0f7 3192 }
0f113f3e
MC
3193
3194 if (iv) {
3195 xctx->xts.key2 = &xctx->ks2;
9197c226 3196 memcpy(ctx->iv, iv, 16);
0f113f3e
MC
3197 }
3198
3199 return 1;
3200}
32a2d8dd 3201
17f121de 3202static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e
MC
3203 const unsigned char *in, size_t len)
3204{
6435f0f6 3205 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
95eda4f0
P
3206
3207 if (xctx->xts.key1 == NULL
3208 || xctx->xts.key2 == NULL
3209 || out == NULL
3210 || in == NULL
3211 || len < AES_BLOCK_SIZE)
0f113f3e 3212 return 0;
95eda4f0 3213
5516c19b 3214 /*
79c44b4e 3215 * Impose a limit of 2^20 blocks per data unit as specified by
5516c19b
P
3216 * IEEE Std 1619-2018. The earlier and obsolete IEEE Std 1619-2007
3217 * indicated that this was a SHOULD NOT rather than a MUST NOT.
3218 * NIST SP 800-38E mandates the same limit.
3219 */
3220 if (len > XTS_MAX_BLOCKS_PER_DATA_UNIT * AES_BLOCK_SIZE) {
9311d0c4 3221 ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DATA_UNIT_IS_TOO_LARGE);
5516c19b
P
3222 return 0;
3223 }
3224
0f113f3e
MC
3225 if (xctx->stream)
3226 (*xctx->stream) (in, out, len,
6435f0f6 3227 xctx->xts.key1, xctx->xts.key2,
9197c226
BK
3228 ctx->iv);
3229 else if (CRYPTO_xts128_encrypt(&xctx->xts, ctx->iv, in, out, len,
6435f0f6 3230 EVP_CIPHER_CTX_encrypting(ctx)))
0f113f3e
MC
3231 return 0;
3232 return 1;
3233}
3234
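For XTS the EVP-level key is the concatenation of the two half keys checked in aes_xts_init_key() (so EVP_aes_256_xts() takes 64 bytes), and the 16-byte IV carries the tweak, conventionally the little-endian data-unit (sector) number. A minimal sketch; the input must be at least one AES block and no more than 2^20 blocks:

#include <openssl/evp.h>

/* Sketch: encrypt one data unit (e.g. a disk sector) with AES-256-XTS.
 * key is 64 bytes (two different 32-byte halves); unit_no selects the tweak. */
static int xts_encrypt_unit_sketch(const unsigned char key[64],
                                   unsigned long long unit_no,
                                   const unsigned char *in, int len,
                                   unsigned char *out)
{
    EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
    unsigned char tweak[16] = { 0 };
    int outl, ok, i;

    for (i = 0; i < 8; i++)               /* little-endian data unit number */
        tweak[i] = (unsigned char)(unit_no >> (8 * i));
    ok = ctx != NULL
         && EVP_EncryptInit_ex(ctx, EVP_aes_256_xts(), NULL, key, tweak)
         && EVP_EncryptUpdate(ctx, out, &outl, in, len);
    EVP_CIPHER_CTX_free(ctx);
    return ok;
}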
5158c763 3235#define aes_xts_cleanup NULL
0f113f3e 3236
5158c763 3237#define XTS_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
0f113f3e
MC
3238 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3239 | EVP_CIPH_CUSTOM_COPY)
3240
3241BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
3242 BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
23916810
DSH
3243
3244static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
0f113f3e 3245{
6435f0f6 3246 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c);
0f113f3e
MC
3247 switch (type) {
3248 case EVP_CTRL_INIT:
3249 cctx->key_set = 0;
3250 cctx->iv_set = 0;
3251 cctx->L = 8;
3252 cctx->M = 12;
3253 cctx->tag_set = 0;
3254 cctx->len_set = 0;
e75c5a79
DSH
3255 cctx->tls_aad_len = -1;
3256 return 1;
3257
7dddf2fc
SL
3258 case EVP_CTRL_GET_IVLEN:
3259 *(int *)ptr = 15 - cctx->L;
3260 return 1;
3261
e75c5a79
DSH
3262 case EVP_CTRL_AEAD_TLS1_AAD:
3263 /* Save the AAD for later use */
3264 if (arg != EVP_AEAD_TLS1_AAD_LEN)
3265 return 0;
6435f0f6 3266 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
e75c5a79
DSH
3267 cctx->tls_aad_len = arg;
3268 {
6435f0f6
RL
3269 uint16_t len =
3270 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
3271 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
e75c5a79 3272 /* Correct length for explicit IV */
2198b3a5
AP
3273 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
3274 return 0;
e75c5a79
DSH
3275 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
3276 /* If decrypting correct for tag too */
2198b3a5
AP
3277 if (!EVP_CIPHER_CTX_encrypting(c)) {
3278 if (len < cctx->M)
3279 return 0;
e75c5a79 3280 len -= cctx->M;
2198b3a5 3281 }
6435f0f6
RL
3282 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
3283 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
e75c5a79
DSH
3284 }
3285 /* Extra padding: tag appended to record */
3286 return cctx->M;
3287
3288 case EVP_CTRL_CCM_SET_IV_FIXED:
3289 /* Sanity check length */
3290 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
3291 return 0;
3292 /* Just copy to first part of IV */
9197c226 3293 memcpy(c->iv, ptr, arg);
0f113f3e
MC
3294 return 1;
3295
e640fa02 3296 case EVP_CTRL_AEAD_SET_IVLEN:
0f113f3e 3297 arg = 15 - arg;
018fcbec 3298 /* fall thru */
0f113f3e
MC
3299 case EVP_CTRL_CCM_SET_L:
3300 if (arg < 2 || arg > 8)
3301 return 0;
3302 cctx->L = arg;
3303 return 1;
3304
e640fa02 3305 case EVP_CTRL_AEAD_SET_TAG:
0f113f3e
MC
3306 if ((arg & 1) || arg < 4 || arg > 16)
3307 return 0;
6435f0f6 3308 if (EVP_CIPHER_CTX_encrypting(c) && ptr)
0f113f3e
MC
3309 return 0;
3310 if (ptr) {
3311 cctx->tag_set = 1;
6435f0f6 3312 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
0f113f3e
MC
3313 }
3314 cctx->M = arg;
3315 return 1;
3316
e640fa02 3317 case EVP_CTRL_AEAD_GET_TAG:
6435f0f6 3318 if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
0f113f3e
MC
3319 return 0;
3320 if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
3321 return 0;
3322 cctx->tag_set = 0;
3323 cctx->iv_set = 0;
3324 cctx->len_set = 0;
3325 return 1;
3326
3327 case EVP_CTRL_COPY:
3328 {
3329 EVP_CIPHER_CTX *out = ptr;
6435f0f6 3330 EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out);
0f113f3e
MC
3331 if (cctx->ccm.key) {
3332 if (cctx->ccm.key != &cctx->ks)
3333 return 0;
3334 cctx_out->ccm.key = &cctx_out->ks;
3335 }
3336 return 1;
3337 }
3338
3339 default:
3340 return -1;
3341
3342 }
3343}
23916810
DSH
3344
3345static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
3346 const unsigned char *iv, int enc)
3347{
6435f0f6 3348 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
0f113f3e
MC
3349 if (!iv && !key)
3350 return 1;
3351 if (key)
3352 do {
5158c763 3353#ifdef HWAES_CAPABLE
0f113f3e 3354 if (HWAES_CAPABLE) {
6435f0f6
RL
3355 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3356 &cctx->ks.ks);
0f113f3e
MC
3357
3358 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3359 &cctx->ks, (block128_f) HWAES_encrypt);
3360 cctx->str = NULL;
3361 cctx->key_set = 1;
3362 break;
3363 } else
5158c763
MC
3364#endif
3365#ifdef VPAES_CAPABLE
0f113f3e 3366 if (VPAES_CAPABLE) {
6435f0f6
RL
3367 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3368 &cctx->ks.ks);
0f113f3e
MC
3369 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3370 &cctx->ks, (block128_f) vpaes_encrypt);
3371 cctx->str = NULL;
3372 cctx->key_set = 1;
3373 break;
3374 }
5158c763 3375#endif
6435f0f6
RL
3376 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3377 &cctx->ks.ks);
0f113f3e
MC
3378 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3379 &cctx->ks, (block128_f) AES_encrypt);
3380 cctx->str = NULL;
3381 cctx->key_set = 1;
3382 } while (0);
3383 if (iv) {
9197c226 3384 memcpy(ctx->iv, iv, 15 - cctx->L);
0f113f3e
MC
3385 cctx->iv_set = 1;
3386 }
3387 return 1;
3388}
23916810 3389
e75c5a79
DSH
3390static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3391 const unsigned char *in, size_t len)
3392{
6435f0f6 3393 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
e75c5a79
DSH
3394 CCM128_CONTEXT *ccm = &cctx->ccm;
3395 /* Encrypt/decrypt must be performed in place */
3396 if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
3397 return -1;
3398 /* If encrypting set explicit IV from sequence number (start of AAD) */
6435f0f6
RL
3399 if (EVP_CIPHER_CTX_encrypting(ctx))
3400 memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
3401 EVP_CCM_TLS_EXPLICIT_IV_LEN);
e75c5a79 3402 /* Get rest of IV from explicit IV */
9197c226 3403 memcpy(ctx->iv + EVP_CCM_TLS_FIXED_IV_LEN, in,
6435f0f6 3404 EVP_CCM_TLS_EXPLICIT_IV_LEN);
e75c5a79
DSH
3405 /* Correct length value */
3406 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
9197c226 3407 if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L,
6435f0f6 3408 len))
e75c5a79
DSH
3409 return -1;
3410 /* Use saved AAD */
6435f0f6 3411 CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
e75c5a79
DSH
3412 /* Fix buffer to point to payload */
3413 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3414 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
6435f0f6 3415 if (EVP_CIPHER_CTX_encrypting(ctx)) {
e75c5a79
DSH
3416 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3417 cctx->str) :
3418 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3419 return -1;
3420 if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
3421 return -1;
3422 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3423 } else {
3424 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3425 cctx->str) :
3426 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3427 unsigned char tag[16];
3428 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3429 if (!CRYPTO_memcmp(tag, in + len, cctx->M))
3430 return len;
3431 }
3432 }
3433 OPENSSL_cleanse(out, len);
3434 return -1;
3435 }
3436}
3437
17f121de 3438static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e
MC
3439 const unsigned char *in, size_t len)
3440{
6435f0f6 3441 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
0f113f3e
MC
3442 CCM128_CONTEXT *ccm = &cctx->ccm;
3443 /* If not set up, return error */
e75c5a79
DSH
3444 if (!cctx->key_set)
3445 return -1;
3446
3447 if (cctx->tls_aad_len >= 0)
3448 return aes_ccm_tls_cipher(ctx, out, in, len);
3449
197421b1
DSH
3450 /* EVP_*Final() doesn't return any data */
3451 if (in == NULL && out != NULL)
3452 return 0;
3453
e75c5a79 3454 if (!cctx->iv_set)
0f113f3e 3455 return -1;
e75c5a79 3456
0f113f3e
MC
3457 if (!out) {
3458 if (!in) {
9197c226 3459 if (CRYPTO_ccm128_setiv(ccm, ctx->iv,
6435f0f6 3460 15 - cctx->L, len))
0f113f3e
MC
3461 return -1;
3462 cctx->len_set = 1;
3463 return len;
3464 }
3465 /* If have AAD need message length */
3466 if (!cctx->len_set && len)
3467 return -1;
3468 CRYPTO_ccm128_aad(ccm, in, len);
3469 return len;
3470 }
67c81ec3
TN
3471
3472 /* The tag must be set before actually decrypting data */
3473 if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
3474 return -1;
3475
0f113f3e
MC
3476 /* If not set length yet do it */
3477 if (!cctx->len_set) {
9197c226 3478 if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
0f113f3e
MC
3479 return -1;
3480 cctx->len_set = 1;
3481 }
6435f0f6 3482 if (EVP_CIPHER_CTX_encrypting(ctx)) {
0f113f3e
MC
3483 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3484 cctx->str) :
3485 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3486 return -1;
3487 cctx->tag_set = 1;
3488 return len;
3489 } else {
3490 int rv = -1;
3491 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3492 cctx->str) :
3493 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3494 unsigned char tag[16];
3495 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
6435f0f6
RL
3496 if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
3497 cctx->M))
0f113f3e
MC
3498 rv = len;
3499 }
3500 }
3501 if (rv == -1)
3502 OPENSSL_cleanse(out, len);
3503 cctx->iv_set = 0;
3504 cctx->tag_set = 0;
3505 cctx->len_set = 0;
3506 return rv;
3507 }
0f113f3e
MC
3508}
3509
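Unlike GCM, the CCM code needs the total plaintext length before any AAD or payload is processed, which is why applications make an extra EVP_EncryptUpdate() call with both buffers NULL. A minimal encrypt-side sketch with a 13-byte nonce (so L = 2) and a 16-byte tag:

#include <openssl/evp.h>

/* Sketch: AES-128-CCM encrypt with AAD; error handling reduced to one flag. */
static int ccm_encrypt_sketch(const unsigned char key[16],
                              const unsigned char nonce[13],
                              const unsigned char *aad, int aadlen,
                              const unsigned char *pt, int ptlen,
                              unsigned char *ct, unsigned char tag[16])
{
    EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
    int outl, ok;

    if (ctx == NULL)
        return 0;
    ok = EVP_EncryptInit_ex(ctx, EVP_aes_128_ccm(), NULL, NULL, NULL)
         && EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_IVLEN, 13, NULL)
         && EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, 16, NULL)
         && EVP_EncryptInit_ex(ctx, NULL, NULL, key, nonce)
         && EVP_EncryptUpdate(ctx, NULL, &outl, NULL, ptlen)   /* total length */
         && EVP_EncryptUpdate(ctx, NULL, &outl, aad, aadlen)   /* AAD */
         && EVP_EncryptUpdate(ctx, ct, &outl, pt, ptlen)
         && EVP_EncryptFinal_ex(ctx, ct + outl, &outl)
         && EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG, 16, tag);
    EVP_CIPHER_CTX_free(ctx);
    return ok;
}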
5158c763 3510#define aes_ccm_cleanup NULL
0f113f3e 3511
e75c5a79
DSH
3512BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
3513 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3514 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
3515 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3516 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
3517 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
0f113f3e
MC
3518
3519typedef struct {
3520 union {
39147079 3521 OSSL_UNION_ALIGN;
0f113f3e
MC
3522 AES_KEY ks;
3523 } ks;
3524 /* Indicates if IV has been set */
3525 unsigned char *iv;
3526} EVP_AES_WRAP_CTX;
97cf1f6c
DSH
3527
3528static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
3529 const unsigned char *iv, int enc)
3530{
6435f0f6 3531 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
0f113f3e
MC
3532 if (!iv && !key)
3533 return 1;
3534 if (key) {
6435f0f6
RL
3535 if (EVP_CIPHER_CTX_encrypting(ctx))
3536 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3537 &wctx->ks.ks);
0f113f3e 3538 else
6435f0f6
RL
3539 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3540 &wctx->ks.ks);
0f113f3e
MC
3541 if (!iv)
3542 wctx->iv = NULL;
3543 }
3544 if (iv) {
9197c226
BK
3545 memcpy(ctx->iv, iv, EVP_CIPHER_CTX_iv_length(ctx));
3546 wctx->iv = ctx->iv;
0f113f3e
MC
3547 }
3548 return 1;
3549}
97cf1f6c
DSH
3550
3551static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e
MC
3552 const unsigned char *in, size_t inlen)
3553{
6435f0f6 3554 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
0f113f3e
MC
3555 size_t rv;
3556 /* AES wrap with padding has IV length of 4, without padding 8 */
3557 int pad = EVP_CIPHER_CTX_iv_length(ctx) == 4;
3558 /* No final operation so always return zero length */
3559 if (!in)
3560 return 0;
3561 /* Input length must always be non-zero */
3562 if (!inlen)
3563 return -1;
3564 /* If decrypting need at least 16 bytes and multiple of 8 */
6435f0f6 3565 if (!EVP_CIPHER_CTX_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
0f113f3e
MC
3566 return -1;
3567 /* If not padding input must be multiple of 8 */
3568 if (!pad && inlen & 0x7)
3569 return -1;
6d777689 3570 if (ossl_is_partially_overlapping(out, in, inlen)) {
9311d0c4 3571 ERR_raise(ERR_LIB_EVP, EVP_R_PARTIALLY_OVERLAPPING);
7141ba31
MC
3572 return 0;
3573 }
0f113f3e 3574 if (!out) {
6435f0f6 3575 if (EVP_CIPHER_CTX_encrypting(ctx)) {
0f113f3e
MC
3576 /* If padding round up to multiple of 8 */
3577 if (pad)
3578 inlen = (inlen + 7) / 8 * 8;
3579 /* 8 byte prefix */
3580 return inlen + 8;
3581 } else {
3582 /*
3583 * If not padding output will be exactly 8 bytes smaller than
3584 * input. If padding it will be at least 8 bytes smaller but we
3585 * don't know how much.
3586 */
3587 return inlen - 8;
3588 }
3589 }
3590 if (pad) {
6435f0f6 3591 if (EVP_CIPHER_CTX_encrypting(ctx))
0f113f3e
MC
3592 rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
3593 out, in, inlen,
3594 (block128_f) AES_encrypt);
3595 else
3596 rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
3597 out, in, inlen,
3598 (block128_f) AES_decrypt);
3599 } else {
6435f0f6 3600 if (EVP_CIPHER_CTX_encrypting(ctx))
0f113f3e
MC
3601 rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
3602 out, in, inlen, (block128_f) AES_encrypt);
3603 else
3604 rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
3605 out, in, inlen, (block128_f) AES_decrypt);
3606 }
3607 return rv ? (int)rv : -1;
3608}
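
/*
 * Illustrative sketch (not part of e_aes.c itself): the output sizing rules
 * and length checks implemented above, written out as a standalone helper so
 * a caller can budget an output buffer.  Returns the number of output bytes
 * to allocate, or 0 for a length the cipher would reject; "pad" mirrors the
 * padded/unpadded distinction made from the IV length above.
 */
static size_t aes_wrap_output_size_sketch(int encrypting, int pad,
                                          size_t inlen)
{
    if (inlen == 0)
        return 0;
    if (encrypting) {
        if (pad)                        /* round up to a multiple of 8 ... */
            inlen = (inlen + 7) / 8 * 8;
        return inlen + 8;               /* ... and add the 8 byte prefix */
    }
    /* Decrypting: need at least 16 bytes and a multiple of 8 */
    if (inlen < 16 || (inlen & 0x7) != 0)
        return 0;
    /*
     * Without padding the output is exactly 8 bytes smaller; with padding it
     * is at most inlen - 8, so this is an upper bound.
     */
    return inlen - 8;
}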

#define WRAP_FLAGS      (EVP_CIPH_WRAP_MODE \
                | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)

static const EVP_CIPHER aes_128_wrap = {
    NID_id_aes128_wrap,
    8, 16, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_128_wrap(void)
{
    return &aes_128_wrap;
}

static const EVP_CIPHER aes_192_wrap = {
    NID_id_aes192_wrap,
    8, 24, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_192_wrap(void)
{
    return &aes_192_wrap;
}

static const EVP_CIPHER aes_256_wrap = {
    NID_id_aes256_wrap,
    8, 32, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_256_wrap(void)
{
    return &aes_256_wrap;
}

static const EVP_CIPHER aes_128_wrap_pad = {
    NID_id_aes128_wrap_pad,
    8, 16, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
{
    return &aes_128_wrap_pad;
}

static const EVP_CIPHER aes_192_wrap_pad = {
    NID_id_aes192_wrap_pad,
    8, 24, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
{
    return &aes_192_wrap_pad;
}

static const EVP_CIPHER aes_256_wrap_pad = {
    NID_id_aes256_wrap_pad,
    8, 32, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
{
    return &aes_256_wrap_pad;
}
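
/*
 * Illustrative usage sketch (not part of e_aes.c itself): wrapping a key
 * with AES-256-WRAP through the public EVP interface.  The legacy wrap
 * ciphers refuse to run unless the caller opts in with
 * EVP_CIPHER_CTX_FLAG_WRAP_ALLOW.  "kek" is the 32 byte key-encryption key;
 * "in" must satisfy the length rules checked in aes_wrap_cipher() above, and
 * "out" must have room for inlen + 8 bytes.
 */
static int aes_wrap_example(const unsigned char kek[32],
                            const unsigned char *in, int inlen,
                            unsigned char *out, int *outlen)
{
    EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
    int len, ok = 0;

    if (ctx == NULL)
        return 0;
    EVP_CIPHER_CTX_set_flags(ctx, EVP_CIPHER_CTX_FLAG_WRAP_ALLOW);
    /* A NULL IV selects the default IV defined by RFC 3394 */
    if (!EVP_EncryptInit_ex(ctx, EVP_aes_256_wrap(), NULL, kek, NULL)
        || !EVP_EncryptUpdate(ctx, out, &len, in, inlen))
        goto err;
    *outlen = len;
    if (!EVP_EncryptFinal_ex(ctx, out + len, &len))
        goto err;
    *outlen += len;
    ok = 1;
 err:
    EVP_CIPHER_CTX_free(ctx);
    return ok;
}
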
#ifndef OPENSSL_NO_OCB
static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
    EVP_CIPHER_CTX *newc;
    EVP_AES_OCB_CTX *new_octx;

    switch (type) {
    case EVP_CTRL_INIT:
        octx->key_set = 0;
        octx->iv_set = 0;
        octx->ivlen = EVP_CIPHER_iv_length(c->cipher);
        octx->iv = c->iv;
        octx->taglen = 16;
        octx->data_buf_len = 0;
        octx->aad_buf_len = 0;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = octx->ivlen;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        /* IV len must be 1 to 15 */
        if (arg <= 0 || arg > 15)
            return 0;

        octx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if (ptr == NULL) {
            /* Tag len must be 0 to 16 */
            if (arg < 0 || arg > 16)
                return 0;

            octx->taglen = arg;
            return 1;
        }
        if (arg != octx->taglen || EVP_CIPHER_CTX_encrypting(c))
            return 0;
        memcpy(octx->tag, ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (arg != octx->taglen || !EVP_CIPHER_CTX_encrypting(c))
            return 0;

        memcpy(ptr, octx->tag, arg);
        return 1;

    case EVP_CTRL_COPY:
        newc = (EVP_CIPHER_CTX *)ptr;
        new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc);
        return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
                                      &new_octx->ksenc.ks,
                                      &new_octx->ksdec.ks);

    default:
        return -1;

    }
}
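
/*
 * Illustrative usage sketch (not part of e_aes.c itself): how the ctrls
 * handled above are reached from application code.  EVP_CTRL_AEAD_SET_TAG
 * with a NULL ptr only sets the tag length; the tag value itself is only
 * accepted when decrypting.  "ivlen" and "taglen" are caller choices within
 * the 1..15 and 0..16 ranges enforced above.
 */
static int ocb_configure_example(EVP_CIPHER_CTX *ctx, int enc,
                                 const unsigned char *key,
                                 const unsigned char *iv, int ivlen,
                                 unsigned char *tag, int taglen)
{
    /* Select the cipher without a key so the ctrls can be issued first */
    if (!EVP_CipherInit_ex(ctx, EVP_aes_128_ocb(), NULL, NULL, NULL, enc)
        || !EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_IVLEN, ivlen, NULL)
        || !EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, taglen, NULL))
        return 0;
    /* When decrypting, hand over the expected tag to verify against */
    if (!enc && !EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, taglen, tag))
        return 0;
    /* Now the key and nonce can be installed */
    return EVP_CipherInit_ex(ctx, NULL, NULL, key, iv, enc);
}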

static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
# ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksenc.ks);
                HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) HWAES_encrypt,
                                        (block128_f) HWAES_decrypt,
                                        enc ? HWAES_ocb_encrypt
                                            : HWAES_ocb_decrypt))
                    return 0;
                break;
            }
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksenc.ks);
                vpaes_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) vpaes_encrypt,
                                        (block128_f) vpaes_decrypt,
                                        NULL))
                    return 0;
                break;
            }
# endif
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &octx->ksenc.ks);
            AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) AES_encrypt,
                                    (block128_f) AES_decrypt,
                                    NULL))
                return 0;
        }
        while (0);

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}

static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    unsigned char *buf;
    int *buf_len;
    int written_len = 0;
    size_t trailing_len;
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

    /* If IV or Key not set then return error */
    if (!octx->iv_set)
        return -1;

    if (!octx->key_set)
        return -1;

    if (in != NULL) {
        /*
         * Need to ensure we are only passing full blocks to low level OCB
         * routines. We do it here rather than in EVP_EncryptUpdate/
         * EVP_DecryptUpdate because we need to pass full blocks of AAD too
         * and those routines don't support that
         */

        /* Are we dealing with AAD or normal data here? */
        if (out == NULL) {
            buf = octx->aad_buf;
            buf_len = &(octx->aad_buf_len);
        } else {
            buf = octx->data_buf;
            buf_len = &(octx->data_buf_len);

            if (ossl_is_partially_overlapping(out + *buf_len, in, len)) {
                ERR_raise(ERR_LIB_EVP, EVP_R_PARTIALLY_OVERLAPPING);
                return 0;
            }
        }

        /*
         * If we've got a partially filled buffer from a previous call then
         * use that data first
         */
        if (*buf_len > 0) {
            unsigned int remaining;

            remaining = AES_BLOCK_SIZE - (*buf_len);
            if (remaining > len) {
                memcpy(buf + (*buf_len), in, len);
                *(buf_len) += len;
                return 0;
            }
            memcpy(buf + (*buf_len), in, remaining);

            /*
             * If we get here we've filled the buffer, so process it
             */
            len -= remaining;
            in += remaining;
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
                    return -1;
            } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            }
            written_len = AES_BLOCK_SIZE;
            *buf_len = 0;
            if (out != NULL)
                out += AES_BLOCK_SIZE;
        }

        /* Do we have a partial block to handle at the end? */
        trailing_len = len % AES_BLOCK_SIZE;

        /*
         * If we've got some full blocks to handle, then process these first
         */
        if (len != trailing_len) {
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
                    return -1;
            } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            }
            written_len += len - trailing_len;
            in += len - trailing_len;
        }

        /* Handle any trailing partial block */
        if (trailing_len > 0) {
            memcpy(buf, in, trailing_len);
            *buf_len = trailing_len;
        }

        return written_len;
    } else {
        /*
         * First of all empty the buffer of any partial block that we might
         * have been provided - both for data and AAD
         */
        if (octx->data_buf_len > 0) {
            if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            }
            written_len = octx->data_buf_len;
            octx->data_buf_len = 0;
        }
        if (octx->aad_buf_len > 0) {
            if (!CRYPTO_ocb128_aad
                (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
                return -1;
            octx->aad_buf_len = 0;
        }
        /* If decrypting then verify */
        if (!EVP_CIPHER_CTX_encrypting(ctx)) {
            if (octx->taglen < 0)
                return -1;
            if (CRYPTO_ocb128_finish(&octx->ocb,
                                     octx->tag, octx->taglen) != 0)
                return -1;
            octx->iv_set = 0;
            return written_len;
        }
        /* If encrypting then just get the tag */
        if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
            return -1;
        /* Don't reuse the IV */
        octx->iv_set = 0;
        return written_len;
    }
}
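
/*
 * Illustrative usage sketch (not part of e_aes.c itself): the calling
 * pattern that aes_ocb_cipher() above is written to support.  AAD is fed in
 * with a NULL output pointer, payload with a real one, and because partial
 * blocks are buffered internally an individual EVP_EncryptUpdate() call may
 * emit fewer bytes than it consumed, so the caller accumulates the reported
 * lengths.  The context is assumed to have been configured as in the
 * previous sketch (encrypt direction, key and nonce installed).
 */
static int ocb_encrypt_stream_example(EVP_CIPHER_CTX *ctx,
                                      const unsigned char *aad, int aad_len,
                                      const unsigned char *msg, int msg_len,
                                      unsigned char *out, int *out_len,
                                      unsigned char *tag, int taglen)
{
    int len, total = 0;

    /* AAD pass: a NULL output buffer routes the data to the aad_buf path */
    if (aad_len > 0 && !EVP_EncryptUpdate(ctx, NULL, &len, aad, aad_len))
        return 0;
    /* Payload, possibly split across several calls in a real application */
    if (!EVP_EncryptUpdate(ctx, out, &len, msg, msg_len))
        return 0;
    total += len;
    /* Final call flushes any buffered partial block and computes the tag */
    if (!EVP_EncryptFinal_ex(ctx, out + total, &len))
        return 0;
    total += len;
    *out_len = total;
    /* Retrieve the tag; taglen must match the value configured earlier */
    return EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG, taglen, tag);
}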

static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
    CRYPTO_ocb128_cleanup(&octx->ocb);
    return 1;
}

BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
#endif                          /* OPENSSL_NO_OCB */