]> git.ipfire.org Git - thirdparty/openssl.git/blame - crypto/evp/e_aes.c
fix some code with obvious wrong coding style
[thirdparty/openssl.git] / crypto / evp / e_aes.c
CommitLineData
aa6bb135 1/*
3c2bdd7d 2 * Copyright 2001-2021 The OpenSSL Project Authors. All Rights Reserved.
deb2c1a1 3 *
4a8b0c55 4 * Licensed under the Apache License 2.0 (the "License"). You may not use
aa6bb135
RS
5 * this file except in compliance with the License. You can obtain a copy
6 * in the file LICENSE in the source distribution or at
7 * https://www.openssl.org/source/license.html
deb2c1a1
DSH
8 */
9
c72fa255
MC
10/*
11 * This file uses the low level AES functions (which are deprecated for
12 * non-internal use) in order to implement the EVP AES ciphers.
13 */
14#include "internal/deprecated.h"
15
743694a6
MC
16#include <string.h>
17#include <assert.h>
8c84b677 18#include <openssl/opensslconf.h>
5158c763
MC
19#include <openssl/crypto.h>
20#include <openssl/evp.h>
21#include <openssl/err.h>
5158c763 22#include <openssl/aes.h>
743694a6
MC
23#include <openssl/rand.h>
24#include <openssl/cmac.h>
25f2138b 25#include "crypto/evp.h"
39147079 26#include "internal/cryptlib.h"
25f2138b
DMSP
27#include "crypto/modes.h"
28#include "crypto/siv.h"
cc731bc3 29#include "crypto/aes_platform.h"
706457b7 30#include "evp_local.h"
0f113f3e
MC
31
32typedef struct {
33 union {
39147079 34 OSSL_UNION_ALIGN;
0f113f3e
MC
35 AES_KEY ks;
36 } ks;
37 block128_f block;
38 union {
39 cbc128_f cbc;
40 ctr128_f ctr;
41 } stream;
42} EVP_AES_KEY;
43
44typedef struct {
45 union {
39147079 46 OSSL_UNION_ALIGN;
0f113f3e
MC
47 AES_KEY ks;
48 } ks; /* AES key schedule to use */
49 int key_set; /* Set if key initialised */
50 int iv_set; /* Set if an iv is set */
51 GCM128_CONTEXT gcm;
52 unsigned char *iv; /* Temporary IV store */
53 int ivlen; /* IV length */
54 int taglen;
55 int iv_gen; /* It is OK to generate IVs */
bcf082d1 56 int iv_gen_rand; /* No IV was specified, so generate a rand IV */
0f113f3e 57 int tls_aad_len; /* TLS AAD length */
d6b34570 58 uint64_t tls_enc_records; /* Number of TLS records encrypted */
0f113f3e
MC
59 ctr128_f ctr;
60} EVP_AES_GCM_CTX;
61
62typedef struct {
63 union {
39147079 64 OSSL_UNION_ALIGN;
0f113f3e
MC
65 AES_KEY ks;
66 } ks1, ks2; /* AES key schedules to use */
67 XTS128_CONTEXT xts;
68 void (*stream) (const unsigned char *in,
69 unsigned char *out, size_t length,
70 const AES_KEY *key1, const AES_KEY *key2,
71 const unsigned char iv[16]);
72} EVP_AES_XTS_CTX;
73
f844f9eb 74#ifdef FIPS_MODULE
2c840201
P
75static const int allow_insecure_decrypt = 0;
76#else
77static const int allow_insecure_decrypt = 1;
78#endif
79
0f113f3e
MC
80typedef struct {
81 union {
39147079 82 OSSL_UNION_ALIGN;
0f113f3e
MC
83 AES_KEY ks;
84 } ks; /* AES key schedule to use */
85 int key_set; /* Set if key initialised */
86 int iv_set; /* Set if an iv is set */
87 int tag_set; /* Set if tag is valid */
88 int len_set; /* Set if message length set */
89 int L, M; /* L and M parameters from RFC3610 */
e75c5a79 90 int tls_aad_len; /* TLS AAD length */
0f113f3e
MC
91 CCM128_CONTEXT ccm;
92 ccm128_f str;
93} EVP_AES_CCM_CTX;
94
5158c763 95#ifndef OPENSSL_NO_OCB
0f113f3e 96typedef struct {
bdc985b1 97 union {
39147079 98 OSSL_UNION_ALIGN;
bdc985b1
AP
99 AES_KEY ks;
100 } ksenc; /* AES key schedule to use for encryption */
101 union {
39147079 102 OSSL_UNION_ALIGN;
bdc985b1
AP
103 AES_KEY ks;
104 } ksdec; /* AES key schedule to use for decryption */
0f113f3e
MC
105 int key_set; /* Set if key initialised */
106 int iv_set; /* Set if an iv is set */
107 OCB128_CONTEXT ocb;
108 unsigned char *iv; /* Temporary IV store */
109 unsigned char tag[16];
110 unsigned char data_buf[16]; /* Store partial data blocks */
111 unsigned char aad_buf[16]; /* Store partial AAD blocks */
112 int data_buf_len;
113 int aad_buf_len;
114 int ivlen; /* IV length */
115 int taglen;
116} EVP_AES_OCB_CTX;
5158c763 117#endif
e6b336ef 118
5158c763 119#define MAXBITCHUNK ((size_t)1<<(sizeof(size_t)*8-4))
17f121de 120
03a5e5ae
PS
121/* increment counter (64-bit int) by 1 */
122static void ctr64_inc(unsigned char *counter)
123{
124 int n = 8;
125 unsigned char c;
126
127 do {
128 --n;
129 c = counter[n];
130 ++c;
131 counter[n] = c;
132 if (c)
133 return;
134 } while (n);
135}
136
459b15d4 137#if defined(AESNI_CAPABLE)
5158c763 138# if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
5158c763 139# define AES_GCM_ASM2(gctx) (gctx->gcm.block==(block128_f)aesni_encrypt && \
0f113f3e 140 gctx->gcm.ghash==gcm_ghash_avx)
5158c763
MC
141# undef AES_GCM_ASM2 /* minor size optimization */
142# endif
4e049c52 143
17f121de 144static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
145 const unsigned char *iv, int enc)
146{
147 int ret, mode;
6435f0f6 148 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
0f113f3e 149
ed576acd 150 mode = EVP_CIPHER_CTX_get_mode(ctx);
0f113f3e
MC
151 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
152 && !enc) {
ed576acd
TM
153 ret = aesni_set_decrypt_key(key,
154 EVP_CIPHER_CTX_get_key_length(ctx) * 8,
6435f0f6 155 &dat->ks.ks);
0f113f3e
MC
156 dat->block = (block128_f) aesni_decrypt;
157 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
158 (cbc128_f) aesni_cbc_encrypt : NULL;
159 } else {
ed576acd
TM
160 ret = aesni_set_encrypt_key(key,
161 EVP_CIPHER_CTX_get_key_length(ctx) * 8,
6435f0f6 162 &dat->ks.ks);
0f113f3e
MC
163 dat->block = (block128_f) aesni_encrypt;
164 if (mode == EVP_CIPH_CBC_MODE)
165 dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
166 else if (mode == EVP_CIPH_CTR_MODE)
167 dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
168 else
169 dat->stream.cbc = NULL;
170 }
171
172 if (ret < 0) {
9311d0c4 173 ERR_raise(ERR_LIB_EVP, EVP_R_AES_KEY_SETUP_FAILED);
0f113f3e
MC
174 return 0;
175 }
176
177 return 1;
178}
179
180static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
181 const unsigned char *in, size_t len)
d1fff483 182{
6435f0f6 183 aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
ed576acd 184 ctx->iv, EVP_CIPHER_CTX_is_encrypting(ctx));
d1fff483 185
0f113f3e 186 return 1;
d1fff483
AP
187}
188
0f113f3e
MC
189static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
190 const unsigned char *in, size_t len)
d1fff483 191{
ed576acd 192 size_t bl = EVP_CIPHER_CTX_get_block_size(ctx);
d1fff483 193
0f113f3e
MC
194 if (len < bl)
195 return 1;
d1fff483 196
6435f0f6 197 aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
ed576acd 198 EVP_CIPHER_CTX_is_encrypting(ctx));
d1fff483 199
0f113f3e 200 return 1;
d1fff483
AP
201}
202
5158c763 203# define aesni_ofb_cipher aes_ofb_cipher
0f113f3e
MC
204static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
205 const unsigned char *in, size_t len);
d1fff483 206
5158c763 207# define aesni_cfb_cipher aes_cfb_cipher
0f113f3e
MC
208static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
209 const unsigned char *in, size_t len);
d1fff483 210
5158c763 211# define aesni_cfb8_cipher aes_cfb8_cipher
0f113f3e
MC
212static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
213 const unsigned char *in, size_t len);
d1fff483 214
5158c763 215# define aesni_cfb1_cipher aes_cfb1_cipher
0f113f3e
MC
216static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
217 const unsigned char *in, size_t len);
d1fff483 218
5158c763 219# define aesni_ctr_cipher aes_ctr_cipher
17f121de 220static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 221 const unsigned char *in, size_t len);
d1fff483 222
17f121de 223static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
224 const unsigned char *iv, int enc)
225{
6435f0f6 226 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
0f113f3e
MC
227 if (!iv && !key)
228 return 1;
229 if (key) {
ed576acd 230 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
6435f0f6 231 &gctx->ks.ks);
0f113f3e
MC
232 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
233 gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
234 /*
235 * If we have an iv can set it directly, otherwise use saved IV.
236 */
237 if (iv == NULL && gctx->iv_set)
238 iv = gctx->iv;
239 if (iv) {
240 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
241 gctx->iv_set = 1;
242 }
243 gctx->key_set = 1;
244 } else {
245 /* If key set use IV, otherwise copy */
246 if (gctx->key_set)
247 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
248 else
249 memcpy(gctx->iv, iv, gctx->ivlen);
250 gctx->iv_set = 1;
251 gctx->iv_gen = 0;
252 }
253 return 1;
254}
255
5158c763 256# define aesni_gcm_cipher aes_gcm_cipher
17f121de 257static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 258 const unsigned char *in, size_t len);
17f121de
AP
259
260static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
261 const unsigned char *iv, int enc)
262{
6435f0f6 263 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
2c840201 264
0f113f3e
MC
265 if (!iv && !key)
266 return 1;
267
268 if (key) {
3538b0f7 269 /* The key is two half length keys in reality */
ed576acd 270 const int bytes = EVP_CIPHER_CTX_get_key_length(ctx) / 2;
3538b0f7
P
271 const int bits = bytes * 8;
272
273 /*
274 * Verify that the two keys are different.
4bd8b240 275 *
3538b0f7
P
276 * This addresses Rogaway's vulnerability.
277 * See comment in aes_xts_init_key() below.
278 */
2c840201
P
279 if ((!allow_insecure_decrypt || enc)
280 && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
9311d0c4 281 ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DUPLICATED_KEYS);
3538b0f7
P
282 return 0;
283 }
284
0f113f3e
MC
285 /* key_len is two AES keys */
286 if (enc) {
3538b0f7 287 aesni_set_encrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e
MC
288 xctx->xts.block1 = (block128_f) aesni_encrypt;
289 xctx->stream = aesni_xts_encrypt;
290 } else {
3538b0f7 291 aesni_set_decrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e
MC
292 xctx->xts.block1 = (block128_f) aesni_decrypt;
293 xctx->stream = aesni_xts_decrypt;
294 }
295
3538b0f7 296 aesni_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
0f113f3e
MC
297 xctx->xts.block2 = (block128_f) aesni_encrypt;
298
299 xctx->xts.key1 = &xctx->ks1;
300 }
301
302 if (iv) {
303 xctx->xts.key2 = &xctx->ks2;
9197c226 304 memcpy(ctx->iv, iv, 16);
0f113f3e
MC
305 }
306
307 return 1;
308}
309
5158c763 310# define aesni_xts_cipher aes_xts_cipher
17f121de 311static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 312 const unsigned char *in, size_t len);
17f121de
AP
313
314static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
315 const unsigned char *iv, int enc)
316{
6435f0f6 317 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
0f113f3e
MC
318 if (!iv && !key)
319 return 1;
320 if (key) {
ed576acd 321 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
6435f0f6 322 &cctx->ks.ks);
0f113f3e
MC
323 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
324 &cctx->ks, (block128_f) aesni_encrypt);
325 cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
326 (ccm128_f) aesni_ccm64_decrypt_blocks;
327 cctx->key_set = 1;
328 }
329 if (iv) {
9197c226 330 memcpy(ctx->iv, iv, 15 - cctx->L);
0f113f3e
MC
331 cctx->iv_set = 1;
332 }
333 return 1;
334}
335
5158c763 336# define aesni_ccm_cipher aes_ccm_cipher
17f121de 337static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 338 const unsigned char *in, size_t len);
17f121de 339
5158c763 340# ifndef OPENSSL_NO_OCB
e6b336ef 341static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
342 const unsigned char *iv, int enc)
343{
6435f0f6 344 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
0f113f3e
MC
345 if (!iv && !key)
346 return 1;
347 if (key) {
348 do {
349 /*
350 * We set both the encrypt and decrypt key here because decrypt
351 * needs both. We could possibly optimise to remove setting the
352 * decrypt for an encryption operation.
353 */
ed576acd 354 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
6435f0f6 355 &octx->ksenc.ks);
ed576acd 356 aesni_set_decrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
6435f0f6 357 &octx->ksdec.ks);
bdc985b1
AP
358 if (!CRYPTO_ocb128_init(&octx->ocb,
359 &octx->ksenc.ks, &octx->ksdec.ks,
0f113f3e 360 (block128_f) aesni_encrypt,
bd30091c
AP
361 (block128_f) aesni_decrypt,
362 enc ? aesni_ocb_encrypt
363 : aesni_ocb_decrypt))
0f113f3e
MC
364 return 0;
365 }
366 while (0);
367
368 /*
369 * If we have an iv we can set it directly, otherwise use saved IV.
370 */
371 if (iv == NULL && octx->iv_set)
372 iv = octx->iv;
373 if (iv) {
374 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
375 != 1)
376 return 0;
377 octx->iv_set = 1;
378 }
379 octx->key_set = 1;
380 } else {
381 /* If key set use IV, otherwise copy */
382 if (octx->key_set)
383 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
384 else
385 memcpy(octx->iv, iv, octx->ivlen);
386 octx->iv_set = 1;
387 }
388 return 1;
389}
390
5158c763 391# define aesni_ocb_cipher aes_ocb_cipher
e6b336ef 392static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 393 const unsigned char *in, size_t len);
5158c763 394# endif /* OPENSSL_NO_OCB */
e6b336ef 395
5158c763 396# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
17f121de 397static const EVP_CIPHER aesni_##keylen##_##mode = { \
0f113f3e
MC
398 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
399 flags|EVP_CIPH_##MODE##_MODE, \
f6c95e46 400 EVP_ORIG_GLOBAL, \
0f113f3e
MC
401 aesni_init_key, \
402 aesni_##mode##_cipher, \
403 NULL, \
404 sizeof(EVP_AES_KEY), \
405 NULL,NULL,NULL,NULL }; \
17f121de 406static const EVP_CIPHER aes_##keylen##_##mode = { \
0f113f3e 407 nid##_##keylen##_##nmode,blocksize, \
f6c95e46 408 keylen/8,ivlen, \
0f113f3e 409 flags|EVP_CIPH_##MODE##_MODE, \
f6c95e46 410 EVP_ORIG_GLOBAL, \
0f113f3e
MC
411 aes_init_key, \
412 aes_##mode##_cipher, \
413 NULL, \
414 sizeof(EVP_AES_KEY), \
415 NULL,NULL,NULL,NULL }; \
17f121de 416const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
8ca28da0 417{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
17f121de 418
5158c763 419# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
17f121de 420static const EVP_CIPHER aesni_##keylen##_##mode = { \
0f113f3e 421 nid##_##keylen##_##mode,blocksize, \
b1ceb439
TS
422 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
423 ivlen, \
0f113f3e 424 flags|EVP_CIPH_##MODE##_MODE, \
f6c95e46 425 EVP_ORIG_GLOBAL, \
0f113f3e
MC
426 aesni_##mode##_init_key, \
427 aesni_##mode##_cipher, \
428 aes_##mode##_cleanup, \
429 sizeof(EVP_AES_##MODE##_CTX), \
430 NULL,NULL,aes_##mode##_ctrl,NULL }; \
17f121de 431static const EVP_CIPHER aes_##keylen##_##mode = { \
0f113f3e 432 nid##_##keylen##_##mode,blocksize, \
b1ceb439
TS
433 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
434 ivlen, \
0f113f3e 435 flags|EVP_CIPH_##MODE##_MODE, \
f6c95e46 436 EVP_ORIG_GLOBAL, \
0f113f3e
MC
437 aes_##mode##_init_key, \
438 aes_##mode##_cipher, \
439 aes_##mode##_cleanup, \
440 sizeof(EVP_AES_##MODE##_CTX), \
441 NULL,NULL,aes_##mode##_ctrl,NULL }; \
17f121de 442const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
8ca28da0 443{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
d1fff483 444
459b15d4 445#elif defined(SPARC_AES_CAPABLE)
c5f6da54
AP
446
447static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
448 const unsigned char *iv, int enc)
449{
450 int ret, mode, bits;
6435f0f6 451 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
0f113f3e 452
ed576acd
TM
453 mode = EVP_CIPHER_CTX_get_mode(ctx);
454 bits = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
0f113f3e
MC
455 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
456 && !enc) {
457 ret = 0;
6435f0f6 458 aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
0f113f3e
MC
459 dat->block = (block128_f) aes_t4_decrypt;
460 switch (bits) {
461 case 128:
462 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
463 (cbc128_f) aes128_t4_cbc_decrypt : NULL;
464 break;
465 case 192:
466 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
467 (cbc128_f) aes192_t4_cbc_decrypt : NULL;
468 break;
469 case 256:
470 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
471 (cbc128_f) aes256_t4_cbc_decrypt : NULL;
472 break;
473 default:
474 ret = -1;
475 }
476 } else {
477 ret = 0;
6435f0f6 478 aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
0f113f3e
MC
479 dat->block = (block128_f) aes_t4_encrypt;
480 switch (bits) {
481 case 128:
482 if (mode == EVP_CIPH_CBC_MODE)
483 dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
484 else if (mode == EVP_CIPH_CTR_MODE)
485 dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
486 else
487 dat->stream.cbc = NULL;
488 break;
489 case 192:
490 if (mode == EVP_CIPH_CBC_MODE)
491 dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
492 else if (mode == EVP_CIPH_CTR_MODE)
493 dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
494 else
495 dat->stream.cbc = NULL;
496 break;
497 case 256:
498 if (mode == EVP_CIPH_CBC_MODE)
499 dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
500 else if (mode == EVP_CIPH_CTR_MODE)
501 dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
502 else
503 dat->stream.cbc = NULL;
504 break;
505 default:
506 ret = -1;
507 }
508 }
509
510 if (ret < 0) {
9311d0c4 511 ERR_raise(ERR_LIB_EVP, EVP_R_AES_KEY_SETUP_FAILED);
0f113f3e
MC
512 return 0;
513 }
514
515 return 1;
516}
517
5158c763 518# define aes_t4_cbc_cipher aes_cbc_cipher
0f113f3e
MC
519static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
520 const unsigned char *in, size_t len);
521
5158c763 522# define aes_t4_ecb_cipher aes_ecb_cipher
0f113f3e
MC
523static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
524 const unsigned char *in, size_t len);
525
5158c763 526# define aes_t4_ofb_cipher aes_ofb_cipher
0f113f3e
MC
527static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
528 const unsigned char *in, size_t len);
529
5158c763 530# define aes_t4_cfb_cipher aes_cfb_cipher
0f113f3e
MC
531static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
532 const unsigned char *in, size_t len);
533
5158c763 534# define aes_t4_cfb8_cipher aes_cfb8_cipher
0f113f3e
MC
535static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
536 const unsigned char *in, size_t len);
537
5158c763 538# define aes_t4_cfb1_cipher aes_cfb1_cipher
0f113f3e
MC
539static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
540 const unsigned char *in, size_t len);
541
5158c763 542# define aes_t4_ctr_cipher aes_ctr_cipher
c5f6da54 543static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 544 const unsigned char *in, size_t len);
c5f6da54
AP
545
546static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
547 const unsigned char *iv, int enc)
548{
6435f0f6 549 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
0f113f3e
MC
550 if (!iv && !key)
551 return 1;
552 if (key) {
ed576acd 553 int bits = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
0f113f3e
MC
554 aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
555 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
556 (block128_f) aes_t4_encrypt);
557 switch (bits) {
558 case 128:
559 gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
560 break;
561 case 192:
562 gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
563 break;
564 case 256:
565 gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
566 break;
567 default:
568 return 0;
569 }
570 /*
571 * If we have an iv can set it directly, otherwise use saved IV.
572 */
573 if (iv == NULL && gctx->iv_set)
574 iv = gctx->iv;
575 if (iv) {
576 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
577 gctx->iv_set = 1;
578 }
579 gctx->key_set = 1;
580 } else {
581 /* If key set use IV, otherwise copy */
582 if (gctx->key_set)
583 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
584 else
585 memcpy(gctx->iv, iv, gctx->ivlen);
586 gctx->iv_set = 1;
587 gctx->iv_gen = 0;
588 }
589 return 1;
590}
591
5158c763 592# define aes_t4_gcm_cipher aes_gcm_cipher
c5f6da54 593static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 594 const unsigned char *in, size_t len);
c5f6da54
AP
595
596static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
597 const unsigned char *iv, int enc)
598{
6435f0f6 599 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
2c840201 600
0f113f3e
MC
601 if (!iv && !key)
602 return 1;
603
604 if (key) {
3538b0f7 605 /* The key is two half length keys in reality */
ed576acd 606 const int bytes = EVP_CIPHER_CTX_get_key_length(ctx) / 2;
3538b0f7
P
607 const int bits = bytes * 8;
608
609 /*
610 * Verify that the two keys are different.
4bd8b240 611 *
3538b0f7
P
612 * This addresses Rogaway's vulnerability.
613 * See comment in aes_xts_init_key() below.
614 */
2c840201
P
615 if ((!allow_insecure_decrypt || enc)
616 && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
9311d0c4 617 ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DUPLICATED_KEYS);
3538b0f7
P
618 return 0;
619 }
620
0f113f3e
MC
621 xctx->stream = NULL;
622 /* key_len is two AES keys */
623 if (enc) {
624 aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
625 xctx->xts.block1 = (block128_f) aes_t4_encrypt;
626 switch (bits) {
627 case 128:
628 xctx->stream = aes128_t4_xts_encrypt;
629 break;
0f113f3e
MC
630 case 256:
631 xctx->stream = aes256_t4_xts_encrypt;
632 break;
633 default:
634 return 0;
635 }
636 } else {
3538b0f7 637 aes_t4_set_decrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e
MC
638 xctx->xts.block1 = (block128_f) aes_t4_decrypt;
639 switch (bits) {
640 case 128:
641 xctx->stream = aes128_t4_xts_decrypt;
642 break;
0f113f3e
MC
643 case 256:
644 xctx->stream = aes256_t4_xts_decrypt;
645 break;
646 default:
647 return 0;
648 }
649 }
650
3538b0f7 651 aes_t4_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
0f113f3e
MC
652 xctx->xts.block2 = (block128_f) aes_t4_encrypt;
653
654 xctx->xts.key1 = &xctx->ks1;
655 }
656
657 if (iv) {
658 xctx->xts.key2 = &xctx->ks2;
9197c226 659 memcpy(ctx->iv, iv, 16);
0f113f3e
MC
660 }
661
662 return 1;
663}
664
5158c763 665# define aes_t4_xts_cipher aes_xts_cipher
c5f6da54 666static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 667 const unsigned char *in, size_t len);
c5f6da54
AP
668
669static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
670 const unsigned char *iv, int enc)
671{
6435f0f6 672 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
0f113f3e
MC
673 if (!iv && !key)
674 return 1;
675 if (key) {
ed576acd 676 int bits = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
0f113f3e
MC
677 aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
678 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
679 &cctx->ks, (block128_f) aes_t4_encrypt);
bdc985b1 680 cctx->str = NULL;
0f113f3e
MC
681 cctx->key_set = 1;
682 }
683 if (iv) {
9197c226 684 memcpy(ctx->iv, iv, 15 - cctx->L);
0f113f3e
MC
685 cctx->iv_set = 1;
686 }
687 return 1;
688}
689
5158c763 690# define aes_t4_ccm_cipher aes_ccm_cipher
c5f6da54 691static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 692 const unsigned char *in, size_t len);
c5f6da54 693
5158c763 694# ifndef OPENSSL_NO_OCB
e6b336ef 695static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
696 const unsigned char *iv, int enc)
697{
6435f0f6 698 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
0f113f3e
MC
699 if (!iv && !key)
700 return 1;
701 if (key) {
702 do {
703 /*
704 * We set both the encrypt and decrypt key here because decrypt
705 * needs both. We could possibly optimise to remove setting the
706 * decrypt for an encryption operation.
707 */
ed576acd 708 aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
6435f0f6 709 &octx->ksenc.ks);
ed576acd 710 aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
6435f0f6 711 &octx->ksdec.ks);
bdc985b1
AP
712 if (!CRYPTO_ocb128_init(&octx->ocb,
713 &octx->ksenc.ks, &octx->ksdec.ks,
0f113f3e 714 (block128_f) aes_t4_encrypt,
02dc0b82
AP
715 (block128_f) aes_t4_decrypt,
716 NULL))
0f113f3e
MC
717 return 0;
718 }
719 while (0);
720
721 /*
722 * If we have an iv we can set it directly, otherwise use saved IV.
723 */
724 if (iv == NULL && octx->iv_set)
725 iv = octx->iv;
726 if (iv) {
727 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
728 != 1)
729 return 0;
730 octx->iv_set = 1;
731 }
732 octx->key_set = 1;
733 } else {
734 /* If key set use IV, otherwise copy */
735 if (octx->key_set)
736 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
737 else
738 memcpy(octx->iv, iv, octx->ivlen);
739 octx->iv_set = 1;
740 }
741 return 1;
742}
743
5158c763 744# define aes_t4_ocb_cipher aes_ocb_cipher
e6b336ef 745static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e 746 const unsigned char *in, size_t len);
5158c763 747# endif /* OPENSSL_NO_OCB */
e6b336ef 748
87d06aed
MC
749# ifndef OPENSSL_NO_SIV
750# define aes_t4_siv_init_key aes_siv_init_key
751# define aes_t4_siv_cipher aes_siv_cipher
752# endif /* OPENSSL_NO_SIV */
753
5158c763 754# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
c5f6da54 755static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
0f113f3e
MC
756 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
757 flags|EVP_CIPH_##MODE##_MODE, \
f6c95e46 758 EVP_ORIG_GLOBAL, \
0f113f3e
MC
759 aes_t4_init_key, \
760 aes_t4_##mode##_cipher, \
761 NULL, \
762 sizeof(EVP_AES_KEY), \
763 NULL,NULL,NULL,NULL }; \
c5f6da54 764static const EVP_CIPHER aes_##keylen##_##mode = { \
0f113f3e
MC
765 nid##_##keylen##_##nmode,blocksize, \
766 keylen/8,ivlen, \
767 flags|EVP_CIPH_##MODE##_MODE, \
f6c95e46 768 EVP_ORIG_GLOBAL, \
0f113f3e
MC
769 aes_init_key, \
770 aes_##mode##_cipher, \
771 NULL, \
772 sizeof(EVP_AES_KEY), \
773 NULL,NULL,NULL,NULL }; \
c5f6da54
AP
774const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
775{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
776
5158c763 777# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
c5f6da54 778static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
0f113f3e 779 nid##_##keylen##_##mode,blocksize, \
b1ceb439
TS
780 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
781 ivlen, \
0f113f3e 782 flags|EVP_CIPH_##MODE##_MODE, \
f6c95e46 783 EVP_ORIG_GLOBAL, \
0f113f3e
MC
784 aes_t4_##mode##_init_key, \
785 aes_t4_##mode##_cipher, \
786 aes_##mode##_cleanup, \
787 sizeof(EVP_AES_##MODE##_CTX), \
788 NULL,NULL,aes_##mode##_ctrl,NULL }; \
c5f6da54 789static const EVP_CIPHER aes_##keylen##_##mode = { \
0f113f3e 790 nid##_##keylen##_##mode,blocksize, \
b1ceb439
TS
791 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
792 ivlen, \
0f113f3e 793 flags|EVP_CIPH_##MODE##_MODE, \
f6c95e46 794 EVP_ORIG_GLOBAL, \
0f113f3e
MC
795 aes_##mode##_init_key, \
796 aes_##mode##_cipher, \
797 aes_##mode##_cleanup, \
798 sizeof(EVP_AES_##MODE##_CTX), \
799 NULL,NULL,aes_##mode##_ctrl,NULL }; \
c5f6da54
AP
800const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
801{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
802
459b15d4
SL
803#elif defined(S390X_aes_128_CAPABLE)
804/* IBM S390X support */
55bd169f
PS
805typedef struct {
806 union {
39147079 807 OSSL_UNION_ALIGN;
55bd169f
PS
808 /*-
809 * KM-AES parameter block - begin
810 * (see z/Architecture Principles of Operation >= SA22-7832-06)
811 */
812 struct {
813 unsigned char k[32];
814 } param;
815 /* KM-AES parameter block - end */
816 } km;
817 unsigned int fc;
818} S390X_AES_ECB_CTX;
819
dacd2a87
PS
820typedef struct {
821 union {
39147079 822 OSSL_UNION_ALIGN;
dacd2a87
PS
823 /*-
824 * KMO-AES parameter block - begin
825 * (see z/Architecture Principles of Operation >= SA22-7832-08)
826 */
827 struct {
828 unsigned char cv[16];
829 unsigned char k[32];
830 } param;
831 /* KMO-AES parameter block - end */
832 } kmo;
833 unsigned int fc;
834
835 int res;
836} S390X_AES_OFB_CTX;
837
74d38a86
PS
838typedef struct {
839 union {
39147079 840 OSSL_UNION_ALIGN;
74d38a86
PS
841 /*-
842 * KMF-AES parameter block - begin
843 * (see z/Architecture Principles of Operation >= SA22-7832-08)
844 */
845 struct {
846 unsigned char cv[16];
847 unsigned char k[32];
848 } param;
849 /* KMF-AES parameter block - end */
850 } kmf;
851 unsigned int fc;
852
853 int res;
854} S390X_AES_CFB_CTX;
855
96530eea
PS
856typedef struct {
857 union {
39147079 858 OSSL_UNION_ALIGN;
96530eea 859 /*-
5d2a6f4b
PS
860 * KMA-GCM-AES parameter block - begin
861 * (see z/Architecture Principles of Operation >= SA22-7832-11)
96530eea
PS
862 */
863 struct {
864 unsigned char reserved[12];
865 union {
866 unsigned int w;
867 unsigned char b[4];
868 } cv;
869 union {
870 unsigned long long g[2];
871 unsigned char b[16];
872 } t;
873 unsigned char h[16];
874 unsigned long long taadl;
875 unsigned long long tpcl;
876 union {
877 unsigned long long g[2];
878 unsigned int w[4];
879 } j0;
880 unsigned char k[32];
881 } param;
5d2a6f4b 882 /* KMA-GCM-AES parameter block - end */
96530eea
PS
883 } kma;
884 unsigned int fc;
885 int key_set;
886
887 unsigned char *iv;
888 int ivlen;
889 int iv_set;
890 int iv_gen;
891
892 int taglen;
893
894 unsigned char ares[16];
895 unsigned char mres[16];
896 unsigned char kres[16];
897 int areslen;
898 int mreslen;
899 int kreslen;
900
901 int tls_aad_len;
d6b34570 902 uint64_t tls_enc_records; /* Number of TLS records encrypted */
96530eea
PS
903} S390X_AES_GCM_CTX;
904
39f5b069
PS
905typedef struct {
906 union {
39147079 907 OSSL_UNION_ALIGN;
39f5b069
PS
908 /*-
909 * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
910 * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
911 * rounds field is used to store the function code and that the key
912 * schedule is not stored (if aes hardware support is detected).
913 */
914 struct {
915 unsigned char pad[16];
916 AES_KEY k;
917 } key;
918
919 struct {
920 /*-
921 * KMAC-AES parameter block - begin
922 * (see z/Architecture Principles of Operation >= SA22-7832-08)
923 */
924 struct {
925 union {
926 unsigned long long g[2];
927 unsigned char b[16];
928 } icv;
929 unsigned char k[32];
930 } kmac_param;
79c44b4e 931 /* KMAC-AES parameter block - end */
39f5b069
PS
932
933 union {
934 unsigned long long g[2];
935 unsigned char b[16];
936 } nonce;
937 union {
938 unsigned long long g[2];
939 unsigned char b[16];
940 } buf;
941
942 unsigned long long blocks;
943 int l;
944 int m;
945 int tls_aad_len;
946 int iv_set;
947 int tag_set;
948 int len_set;
949 int key_set;
950
951 unsigned char pad[140];
952 unsigned int fc;
953 } ccm;
954 } aes;
955} S390X_AES_CCM_CTX;
956
96530eea
PS
957# define s390x_aes_init_key aes_init_key
958static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
959 const unsigned char *iv, int enc);
960
dd6b2706 961# define S390X_AES_CBC_CTX EVP_AES_KEY
55bd169f
PS
962
963# define s390x_aes_cbc_init_key aes_init_key
96530eea
PS
964
965# define s390x_aes_cbc_cipher aes_cbc_cipher
966static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
967 const unsigned char *in, size_t len);
968
55bd169f
PS
969static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx,
970 const unsigned char *key,
971 const unsigned char *iv, int enc)
972{
973 S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
ed576acd 974 const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
55bd169f
PS
975
976 cctx->fc = S390X_AES_FC(keylen);
977 if (!enc)
978 cctx->fc |= S390X_DECRYPT;
979
980 memcpy(cctx->km.param.k, key, keylen);
981 return 1;
982}
96530eea 983
96530eea 984static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
55bd169f
PS
985 const unsigned char *in, size_t len)
986{
987 S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
988
989 s390x_km(in, len, out, cctx->fc, &cctx->km.param);
990 return 1;
991}
96530eea 992
dacd2a87
PS
993static int s390x_aes_ofb_init_key(EVP_CIPHER_CTX *ctx,
994 const unsigned char *key,
995 const unsigned char *ivec, int enc)
996{
997 S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
9197c226 998 const unsigned char *iv = ctx->oiv;
ed576acd
TM
999 const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
1000 const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
55bd169f 1001
dacd2a87
PS
1002 memcpy(cctx->kmo.param.cv, iv, ivlen);
1003 memcpy(cctx->kmo.param.k, key, keylen);
1004 cctx->fc = S390X_AES_FC(keylen);
1005 cctx->res = 0;
1006 return 1;
1007}
96530eea 1008
96530eea 1009static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
dacd2a87
PS
1010 const unsigned char *in, size_t len)
1011{
1012 S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
c719ea17
IF
1013 const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
1014 unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);
dacd2a87
PS
1015 int n = cctx->res;
1016 int rem;
1017
c719ea17 1018 memcpy(cctx->kmo.param.cv, iv, ivlen);
dacd2a87
PS
1019 while (n && len) {
1020 *out = *in ^ cctx->kmo.param.cv[n];
1021 n = (n + 1) & 0xf;
1022 --len;
1023 ++in;
1024 ++out;
1025 }
1026
1027 rem = len & 0xf;
1028
1029 len &= ~(size_t)0xf;
1030 if (len) {
1031 s390x_kmo(in, len, out, cctx->fc, &cctx->kmo.param);
1032
1033 out += len;
1034 in += len;
1035 }
1036
1037 if (rem) {
1038 s390x_km(cctx->kmo.param.cv, 16, cctx->kmo.param.cv, cctx->fc,
1039 cctx->kmo.param.k);
1040
1041 while (rem--) {
1042 out[n] = in[n] ^ cctx->kmo.param.cv[n];
1043 ++n;
1044 }
1045 }
1046
c719ea17 1047 memcpy(iv, cctx->kmo.param.cv, ivlen);
dacd2a87
PS
1048 cctx->res = n;
1049 return 1;
1050}
96530eea 1051
74d38a86
PS
1052static int s390x_aes_cfb_init_key(EVP_CIPHER_CTX *ctx,
1053 const unsigned char *key,
1054 const unsigned char *ivec, int enc)
1055{
1056 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
9197c226 1057 const unsigned char *iv = ctx->oiv;
ed576acd
TM
1058 const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
1059 const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
74d38a86
PS
1060
1061 cctx->fc = S390X_AES_FC(keylen);
1062 cctx->fc |= 16 << 24; /* 16 bytes cipher feedback */
1063 if (!enc)
1064 cctx->fc |= S390X_DECRYPT;
55bd169f 1065
74d38a86
PS
1066 cctx->res = 0;
1067 memcpy(cctx->kmf.param.cv, iv, ivlen);
1068 memcpy(cctx->kmf.param.k, key, keylen);
1069 return 1;
1070}
96530eea 1071
96530eea 1072static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
74d38a86
PS
1073 const unsigned char *in, size_t len)
1074{
1075 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
ed576acd
TM
1076 const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
1077 const int enc = EVP_CIPHER_CTX_is_encrypting(ctx);
c719ea17
IF
1078 const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
1079 unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);
74d38a86
PS
1080 int n = cctx->res;
1081 int rem;
1082 unsigned char tmp;
1083
c719ea17 1084 memcpy(cctx->kmf.param.cv, iv, ivlen);
74d38a86
PS
1085 while (n && len) {
1086 tmp = *in;
1087 *out = cctx->kmf.param.cv[n] ^ tmp;
1088 cctx->kmf.param.cv[n] = enc ? *out : tmp;
1089 n = (n + 1) & 0xf;
1090 --len;
1091 ++in;
1092 ++out;
1093 }
1094
1095 rem = len & 0xf;
1096
1097 len &= ~(size_t)0xf;
1098 if (len) {
1099 s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
1100
1101 out += len;
1102 in += len;
1103 }
1104
1105 if (rem) {
1106 s390x_km(cctx->kmf.param.cv, 16, cctx->kmf.param.cv,
1107 S390X_AES_FC(keylen), cctx->kmf.param.k);
1108
1109 while (rem--) {
1110 tmp = in[n];
1111 out[n] = cctx->kmf.param.cv[n] ^ tmp;
1112 cctx->kmf.param.cv[n] = enc ? out[n] : tmp;
1113 ++n;
1114 }
1115 }
96530eea 1116
c719ea17 1117 memcpy(iv, cctx->kmf.param.cv, ivlen);
74d38a86
PS
1118 cctx->res = n;
1119 return 1;
1120}
1121
74d38a86
PS
1122static int s390x_aes_cfb8_init_key(EVP_CIPHER_CTX *ctx,
1123 const unsigned char *key,
1124 const unsigned char *ivec, int enc)
1125{
1126 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
9197c226 1127 const unsigned char *iv = ctx->oiv;
ed576acd
TM
1128 const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
1129 const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
74d38a86
PS
1130
1131 cctx->fc = S390X_AES_FC(keylen);
1132 cctx->fc |= 1 << 24; /* 1 byte cipher feedback */
1133 if (!enc)
1134 cctx->fc |= S390X_DECRYPT;
96530eea 1135
74d38a86
PS
1136 memcpy(cctx->kmf.param.cv, iv, ivlen);
1137 memcpy(cctx->kmf.param.k, key, keylen);
1138 return 1;
1139}
55bd169f 1140
96530eea 1141static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
74d38a86
PS
1142 const unsigned char *in, size_t len)
1143{
1144 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
c719ea17
IF
1145 const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
1146 unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);
74d38a86 1147
c719ea17 1148 memcpy(cctx->kmf.param.cv, iv, ivlen);
74d38a86 1149 s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
c719ea17 1150 memcpy(iv, cctx->kmf.param.cv, ivlen);
74d38a86
PS
1151 return 1;
1152}
96530eea 1153
55bd169f
PS
1154# define s390x_aes_cfb1_init_key aes_init_key
1155
96530eea
PS
1156# define s390x_aes_cfb1_cipher aes_cfb1_cipher
1157static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1158 const unsigned char *in, size_t len);
1159
dd6b2706 1160# define S390X_AES_CTR_CTX EVP_AES_KEY
55bd169f
PS
1161
1162# define s390x_aes_ctr_init_key aes_init_key
96530eea
PS
1163
1164# define s390x_aes_ctr_cipher aes_ctr_cipher
1165static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1166 const unsigned char *in, size_t len);
1167
bcf082d1 1168/* iv + padding length for iv lengths != 12 */
dd6b2706 1169# define S390X_gcm_ivpadlen(i) ((((i) + 15) >> 4 << 4) + 16)
96530eea 1170
5d2a6f4b
PS
1171/*-
1172 * Process additional authenticated data. Returns 0 on success. Code is
1173 * big-endian.
1174 */
96530eea
PS
1175static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
1176 size_t len)
1177{
1178 unsigned long long alen;
1179 int n, rem;
1180
1181 if (ctx->kma.param.tpcl)
1182 return -2;
1183
1184 alen = ctx->kma.param.taadl + len;
1185 if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
1186 return -1;
1187 ctx->kma.param.taadl = alen;
1188
1189 n = ctx->areslen;
1190 if (n) {
1191 while (n && len) {
1192 ctx->ares[n] = *aad;
1193 n = (n + 1) & 0xf;
1194 ++aad;
1195 --len;
1196 }
1197 /* ctx->ares contains a complete block if offset has wrapped around */
1198 if (!n) {
1199 s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
1200 ctx->fc |= S390X_KMA_HS;
1201 }
1202 ctx->areslen = n;
1203 }
1204
1205 rem = len & 0xf;
1206
25868993 1207 len &= ~(size_t)0xf;
96530eea
PS
1208 if (len) {
1209 s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
1210 aad += len;
1211 ctx->fc |= S390X_KMA_HS;
1212 }
1213
1214 if (rem) {
1215 ctx->areslen = rem;
1216
1217 do {
1218 --rem;
1219 ctx->ares[rem] = aad[rem];
1220 } while (rem);
1221 }
1222 return 0;
1223}
1224
5d2a6f4b
PS
1225/*-
1226 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
1227 * success. Code is big-endian.
1228 */
96530eea
PS
1229static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
1230 unsigned char *out, size_t len)
1231{
1232 const unsigned char *inptr;
1233 unsigned long long mlen;
1234 union {
1235 unsigned int w[4];
1236 unsigned char b[16];
1237 } buf;
1238 size_t inlen;
1239 int n, rem, i;
1240
1241 mlen = ctx->kma.param.tpcl + len;
1242 if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
1243 return -1;
1244 ctx->kma.param.tpcl = mlen;
1245
1246 n = ctx->mreslen;
1247 if (n) {
1248 inptr = in;
1249 inlen = len;
1250 while (n && inlen) {
1251 ctx->mres[n] = *inptr;
1252 n = (n + 1) & 0xf;
1253 ++inptr;
1254 --inlen;
1255 }
1256 /* ctx->mres contains a complete block if offset has wrapped around */
1257 if (!n) {
1258 s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
1259 ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
1260 ctx->fc |= S390X_KMA_HS;
1261 ctx->areslen = 0;
1262
1263 /* previous call already encrypted/decrypted its remainder,
1264 * see comment below */
1265 n = ctx->mreslen;
1266 while (n) {
1267 *out = buf.b[n];
1268 n = (n + 1) & 0xf;
1269 ++out;
1270 ++in;
1271 --len;
1272 }
1273 ctx->mreslen = 0;
1274 }
1275 }
1276
1277 rem = len & 0xf;
1278
25868993 1279 len &= ~(size_t)0xf;
96530eea
PS
1280 if (len) {
1281 s390x_kma(ctx->ares, ctx->areslen, in, len, out,
1282 ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
1283 in += len;
1284 out += len;
1285 ctx->fc |= S390X_KMA_HS;
1286 ctx->areslen = 0;
1287 }
1288
1289 /*-
1290 * If there is a remainder, it has to be saved such that it can be
1291 * processed by kma later. However, we also have to do the for-now
1292 * unauthenticated encryption/decryption part here and now...
1293 */
1294 if (rem) {
1295 if (!ctx->mreslen) {
1296 buf.w[0] = ctx->kma.param.j0.w[0];
1297 buf.w[1] = ctx->kma.param.j0.w[1];
1298 buf.w[2] = ctx->kma.param.j0.w[2];
1299 buf.w[3] = ctx->kma.param.cv.w + 1;
1300 s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);
1301 }
1302
1303 n = ctx->mreslen;
1304 for (i = 0; i < rem; i++) {
1305 ctx->mres[n + i] = in[i];
1306 out[i] = in[i] ^ ctx->kres[n + i];
1307 }
1308
1309 ctx->mreslen += rem;
1310 }
1311 return 0;
1312}
1313
5d2a6f4b
PS
1314/*-
1315 * Initialize context structure. Code is big-endian.
1316 */
96530eea
PS
1317static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx,
1318 const unsigned char *iv)
1319{
1320 ctx->kma.param.t.g[0] = 0;
1321 ctx->kma.param.t.g[1] = 0;
1322 ctx->kma.param.tpcl = 0;
1323 ctx->kma.param.taadl = 0;
1324 ctx->mreslen = 0;
1325 ctx->areslen = 0;
1326 ctx->kreslen = 0;
1327
1328 if (ctx->ivlen == 12) {
1329 memcpy(&ctx->kma.param.j0, iv, ctx->ivlen);
1330 ctx->kma.param.j0.w[3] = 1;
1331 ctx->kma.param.cv.w = 1;
1332 } else {
1333 /* ctx->iv has the right size and is already padded. */
1334 memcpy(ctx->iv, iv, ctx->ivlen);
1335 s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
1336 ctx->fc, &ctx->kma.param);
1337 ctx->fc |= S390X_KMA_HS;
1338
1339 ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
1340 ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
1341 ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
1342 ctx->kma.param.t.g[0] = 0;
1343 ctx->kma.param.t.g[1] = 0;
1344 }
1345}
1346
5d2a6f4b
PS
1347/*-
1348 * Performs various operations on the context structure depending on control
1349 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
1350 * Code is big-endian.
1351 */
96530eea
PS
1352static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
1353{
1354 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
1355 S390X_AES_GCM_CTX *gctx_out;
1356 EVP_CIPHER_CTX *out;
9197c226 1357 unsigned char *buf;
96530eea
PS
1358 int ivlen, enc, len;
1359
1360 switch (type) {
1361 case EVP_CTRL_INIT:
ed576acd 1362 ivlen = EVP_CIPHER_get_iv_length(c->cipher);
96530eea
PS
1363 gctx->key_set = 0;
1364 gctx->iv_set = 0;
1365 gctx->ivlen = ivlen;
9197c226 1366 gctx->iv = c->iv;
96530eea
PS
1367 gctx->taglen = -1;
1368 gctx->iv_gen = 0;
1369 gctx->tls_aad_len = -1;
1370 return 1;
1371
7dddf2fc
SL
1372 case EVP_CTRL_GET_IVLEN:
1373 *(int *)ptr = gctx->ivlen;
1374 return 1;
1375
96530eea
PS
1376 case EVP_CTRL_AEAD_SET_IVLEN:
1377 if (arg <= 0)
1378 return 0;
1379
1380 if (arg != 12) {
96530eea
PS
1381 len = S390X_gcm_ivpadlen(arg);
1382
1383 /* Allocate memory for iv if needed. */
1384 if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
9197c226 1385 if (gctx->iv != c->iv)
96530eea
PS
1386 OPENSSL_free(gctx->iv);
1387
cdb10bae 1388 if ((gctx->iv = OPENSSL_malloc(len)) == NULL) {
9311d0c4 1389 ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
96530eea 1390 return 0;
cdb10bae 1391 }
96530eea
PS
1392 }
1393 /* Add padding. */
1394 memset(gctx->iv + arg, 0, len - arg - 8);
1395 *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;
1396 }
1397 gctx->ivlen = arg;
1398 return 1;
1399
1400 case EVP_CTRL_AEAD_SET_TAG:
1401 buf = EVP_CIPHER_CTX_buf_noconst(c);
ed576acd 1402 enc = EVP_CIPHER_CTX_is_encrypting(c);
96530eea
PS
1403 if (arg <= 0 || arg > 16 || enc)
1404 return 0;
1405
1406 memcpy(buf, ptr, arg);
1407 gctx->taglen = arg;
1408 return 1;
1409
1410 case EVP_CTRL_AEAD_GET_TAG:
ed576acd 1411 enc = EVP_CIPHER_CTX_is_encrypting(c);
96530eea
PS
1412 if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
1413 return 0;
1414
1415 memcpy(ptr, gctx->kma.param.t.b, arg);
1416 return 1;
1417
1418 case EVP_CTRL_GCM_SET_IV_FIXED:
1419 /* Special case: -1 length restores whole iv */
1420 if (arg == -1) {
1421 memcpy(gctx->iv, ptr, gctx->ivlen);
1422 gctx->iv_gen = 1;
1423 return 1;
1424 }
1425 /*
1426 * Fixed field must be at least 4 bytes and invocation field at least
1427 * 8.
1428 */
1429 if ((arg < 4) || (gctx->ivlen - arg) < 8)
1430 return 0;
1431
1432 if (arg)
1433 memcpy(gctx->iv, ptr, arg);
1434
ed576acd 1435 enc = EVP_CIPHER_CTX_is_encrypting(c);
16cfc2c9
KR
1436 if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
1437 return 0;
96530eea
PS
1438
1439 gctx->iv_gen = 1;
1440 return 1;
1441
1442 case EVP_CTRL_GCM_IV_GEN:
1443 if (gctx->iv_gen == 0 || gctx->key_set == 0)
1444 return 0;
1445
1446 s390x_aes_gcm_setiv(gctx, gctx->iv);
1447
1448 if (arg <= 0 || arg > gctx->ivlen)
1449 arg = gctx->ivlen;
1450
1451 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
1452 /*
1453 * Invocation field will be at least 8 bytes in size and so no need
1454 * to check wrap around or increment more than last 8 bytes.
1455 */
03a5e5ae 1456 ctr64_inc(gctx->iv + gctx->ivlen - 8);
96530eea
PS
1457 gctx->iv_set = 1;
1458 return 1;
1459
1460 case EVP_CTRL_GCM_SET_IV_INV:
ed576acd 1461 enc = EVP_CIPHER_CTX_is_encrypting(c);
96530eea
PS
1462 if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
1463 return 0;
1464
1465 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
1466 s390x_aes_gcm_setiv(gctx, gctx->iv);
1467 gctx->iv_set = 1;
1468 return 1;
1469
1470 case EVP_CTRL_AEAD_TLS1_AAD:
1471 /* Save the aad for later use. */
1472 if (arg != EVP_AEAD_TLS1_AAD_LEN)
1473 return 0;
1474
1475 buf = EVP_CIPHER_CTX_buf_noconst(c);
1476 memcpy(buf, ptr, arg);
1477 gctx->tls_aad_len = arg;
d6b34570 1478 gctx->tls_enc_records = 0;
96530eea
PS
1479
1480 len = buf[arg - 2] << 8 | buf[arg - 1];
1481 /* Correct length for explicit iv. */
1482 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
1483 return 0;
1484 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
1485
1486 /* If decrypting correct for tag too. */
ed576acd 1487 enc = EVP_CIPHER_CTX_is_encrypting(c);
96530eea
PS
1488 if (!enc) {
1489 if (len < EVP_GCM_TLS_TAG_LEN)
1490 return 0;
1491 len -= EVP_GCM_TLS_TAG_LEN;
1492 }
1493 buf[arg - 2] = len >> 8;
1494 buf[arg - 1] = len & 0xff;
1495 /* Extra padding: tag appended to record. */
1496 return EVP_GCM_TLS_TAG_LEN;
1497
1498 case EVP_CTRL_COPY:
1499 out = ptr;
1500 gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);
96530eea 1501
9197c226
BK
1502 if (gctx->iv == c->iv) {
1503 gctx_out->iv = out->iv;
96530eea
PS
1504 } else {
1505 len = S390X_gcm_ivpadlen(gctx->ivlen);
1506
cdb10bae 1507 if ((gctx_out->iv = OPENSSL_malloc(len)) == NULL) {
9311d0c4 1508 ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
96530eea 1509 return 0;
cdb10bae 1510 }
96530eea
PS
1511
1512 memcpy(gctx_out->iv, gctx->iv, len);
1513 }
1514 return 1;
1515
1516 default:
1517 return -1;
1518 }
1519}
1520
5d2a6f4b
PS
1521/*-
1522 * Set key and/or iv. Returns 1 on success. Otherwise 0 is returned.
1523 */
96530eea
PS
1524static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
1525 const unsigned char *key,
1526 const unsigned char *iv, int enc)
1527{
1528 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1529 int keylen;
1530
1531 if (iv == NULL && key == NULL)
1532 return 1;
1533
1534 if (key != NULL) {
ed576acd 1535 keylen = EVP_CIPHER_CTX_get_key_length(ctx);
96530eea
PS
1536 memcpy(&gctx->kma.param.k, key, keylen);
1537
8eb399fb 1538 gctx->fc = S390X_AES_FC(keylen);
96530eea
PS
1539 if (!enc)
1540 gctx->fc |= S390X_DECRYPT;
1541
1542 if (iv == NULL && gctx->iv_set)
1543 iv = gctx->iv;
1544
1545 if (iv != NULL) {
1546 s390x_aes_gcm_setiv(gctx, iv);
1547 gctx->iv_set = 1;
1548 }
1549 gctx->key_set = 1;
1550 } else {
1551 if (gctx->key_set)
1552 s390x_aes_gcm_setiv(gctx, iv);
1553 else
1554 memcpy(gctx->iv, iv, gctx->ivlen);
1555
1556 gctx->iv_set = 1;
1557 gctx->iv_gen = 0;
1558 }
1559 return 1;
1560}
1561
5d2a6f4b
PS
1562/*-
1563 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1564 * if successful. Otherwise -1 is returned. Code is big-endian.
1565 */
96530eea
PS
1566static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1567 const unsigned char *in, size_t len)
1568{
1569 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1570 const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
ed576acd 1571 const int enc = EVP_CIPHER_CTX_is_encrypting(ctx);
96530eea
PS
1572 int rv = -1;
1573
1574 if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
1575 return -1;
1576
d6b34570
P
1577 /*
1578 * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
1579 * Requirements from SP 800-38D". The requirements is for one party to the
1580 * communication to fail after 2^64 - 1 keys. We do this on the encrypting
1581 * side only.
1582 */
1583 if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
9311d0c4 1584 ERR_raise(ERR_LIB_EVP, EVP_R_TOO_MANY_RECORDS);
d6b34570
P
1585 goto err;
1586 }
1587
96530eea
PS
1588 if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN
1589 : EVP_CTRL_GCM_SET_IV_INV,
1590 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
1591 goto err;
1592
1593 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1594 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1595 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
1596
1597 gctx->kma.param.taadl = gctx->tls_aad_len << 3;
1598 gctx->kma.param.tpcl = len << 3;
1599 s390x_kma(buf, gctx->tls_aad_len, in, len, out,
1600 gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
1601
1602 if (enc) {
1603 memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
1604 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
1605 } else {
1606 if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
1607 EVP_GCM_TLS_TAG_LEN)) {
1608 OPENSSL_cleanse(out, len);
1609 goto err;
1610 }
1611 rv = len;
1612 }
1613err:
1614 gctx->iv_set = 0;
1615 gctx->tls_aad_len = -1;
1616 return rv;
1617}
1618
5d2a6f4b
PS
1619/*-
1620 * Called from EVP layer to initialize context, process additional
1621 * authenticated data, en/de-crypt plain/cipher-text and authenticate
1622 * ciphertext or process a TLS packet, depending on context. Returns bytes
1623 * written on success. Otherwise -1 is returned. Code is big-endian.
1624 */
96530eea
PS
1625static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1626 const unsigned char *in, size_t len)
1627{
1628 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1629 unsigned char *buf, tmp[16];
1630 int enc;
1631
1632 if (!gctx->key_set)
1633 return -1;
1634
1635 if (gctx->tls_aad_len >= 0)
1636 return s390x_aes_gcm_tls_cipher(ctx, out, in, len);
1637
1638 if (!gctx->iv_set)
1639 return -1;
1640
1641 if (in != NULL) {
1642 if (out == NULL) {
1643 if (s390x_aes_gcm_aad(gctx, in, len))
1644 return -1;
1645 } else {
1646 if (s390x_aes_gcm(gctx, in, out, len))
1647 return -1;
1648 }
1649 return len;
1650 } else {
1651 gctx->kma.param.taadl <<= 3;
1652 gctx->kma.param.tpcl <<= 3;
1653 s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
1654 gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
1655 /* recall that we already did en-/decrypt gctx->mres
1656 * and returned it to caller... */
1657 OPENSSL_cleanse(tmp, gctx->mreslen);
1658 gctx->iv_set = 0;
1659
ed576acd 1660 enc = EVP_CIPHER_CTX_is_encrypting(ctx);
96530eea
PS
1661 if (enc) {
1662 gctx->taglen = 16;
1663 } else {
1664 if (gctx->taglen < 0)
1665 return -1;
1666
1667 buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1668 if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
1669 return -1;
1670 }
1671 return 0;
1672 }
1673}
1674
1675static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
1676{
1677 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
96530eea
PS
1678
1679 if (gctx == NULL)
1680 return 0;
1681
9197c226 1682 if (gctx->iv != c->iv)
96530eea
PS
1683 OPENSSL_free(gctx->iv);
1684
1685 OPENSSL_cleanse(gctx, sizeof(*gctx));
1686 return 1;
1687}
1688
dd6b2706 1689# define S390X_AES_XTS_CTX EVP_AES_XTS_CTX
96530eea
PS
1690
1691# define s390x_aes_xts_init_key aes_xts_init_key
1692static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
1693 const unsigned char *key,
1694 const unsigned char *iv, int enc);
1695# define s390x_aes_xts_cipher aes_xts_cipher
1696static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1697 const unsigned char *in, size_t len);
1698# define s390x_aes_xts_ctrl aes_xts_ctrl
1699static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
1700# define s390x_aes_xts_cleanup aes_xts_cleanup
1701
39f5b069
PS
1702/*-
1703 * Set nonce and length fields. Code is big-endian.
1704 */
1705static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
1706 const unsigned char *nonce,
1707 size_t mlen)
1708{
1709 ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
1710 ctx->aes.ccm.nonce.g[1] = mlen;
1711 memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);
1712}
1713
1714/*-
1715 * Process additional authenticated data. Code is big-endian.
1716 */
1717static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,
1718 size_t alen)
1719{
1720 unsigned char *ptr;
1721 int i, rem;
1722
1723 if (!alen)
1724 return;
1725
1726 ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;
1727
1728 /* Suppress 'type-punned pointer dereference' warning. */
1729 ptr = ctx->aes.ccm.buf.b;
1730
1731 if (alen < ((1 << 16) - (1 << 8))) {
1732 *(uint16_t *)ptr = alen;
1733 i = 2;
1734 } else if (sizeof(alen) == 8
1735 && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) {
1736 *(uint16_t *)ptr = 0xffff;
1737 *(uint64_t *)(ptr + 2) = alen;
1738 i = 10;
1739 } else {
1740 *(uint16_t *)ptr = 0xfffe;
1741 *(uint32_t *)(ptr + 2) = alen;
1742 i = 6;
1743 }
1744
1745 while (i < 16 && alen) {
1746 ctx->aes.ccm.buf.b[i] = *aad;
1747 ++aad;
1748 --alen;
1749 ++i;
1750 }
1751 while (i < 16) {
1752 ctx->aes.ccm.buf.b[i] = 0;
1753 ++i;
1754 }
1755
1756 ctx->aes.ccm.kmac_param.icv.g[0] = 0;
1757 ctx->aes.ccm.kmac_param.icv.g[1] = 0;
1758 s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc,
1759 &ctx->aes.ccm.kmac_param);
1760 ctx->aes.ccm.blocks += 2;
1761
1762 rem = alen & 0xf;
25868993 1763 alen &= ~(size_t)0xf;
39f5b069
PS
1764 if (alen) {
1765 s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1766 ctx->aes.ccm.blocks += alen >> 4;
1767 aad += alen;
1768 }
1769 if (rem) {
1770 for (i = 0; i < rem; i++)
1771 ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i];
1772
1773 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1774 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1775 ctx->aes.ccm.kmac_param.k);
1776 ctx->aes.ccm.blocks++;
1777 }
1778}
1779
1780/*-
1781 * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for
1782 * success.
1783 */
1784static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in,
1785 unsigned char *out, size_t len, int enc)
1786{
1787 size_t n, rem;
1788 unsigned int i, l, num;
1789 unsigned char flags;
1790
1791 flags = ctx->aes.ccm.nonce.b[0];
1792 if (!(flags & S390X_CCM_AAD_FLAG)) {
1793 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b,
1794 ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k);
1795 ctx->aes.ccm.blocks++;
1796 }
1797 l = flags & 0x7;
1798 ctx->aes.ccm.nonce.b[0] = l;
1799
1800 /*-
1801 * Reconstruct length from encoded length field
1802 * and initialize it with counter value.
1803 */
1804 n = 0;
1805 for (i = 15 - l; i < 15; i++) {
1806 n |= ctx->aes.ccm.nonce.b[i];
1807 ctx->aes.ccm.nonce.b[i] = 0;
1808 n <<= 8;
1809 }
1810 n |= ctx->aes.ccm.nonce.b[15];
1811 ctx->aes.ccm.nonce.b[15] = 1;
1812
1813 if (n != len)
dd6b2706 1814 return -1; /* length mismatch */
39f5b069
PS
1815
1816 if (enc) {
1817 /* Two operations per block plus one for tag encryption */
1818 ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1;
1819 if (ctx->aes.ccm.blocks > (1ULL << 61))
dd6b2706 1820 return -2; /* too much data */
39f5b069
PS
1821 }
1822
1823 num = 0;
1824 rem = len & 0xf;
25868993 1825 len &= ~(size_t)0xf;
39f5b069
PS
1826
1827 if (enc) {
1828 /* mac-then-encrypt */
1829 if (len)
1830 s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1831 if (rem) {
1832 for (i = 0; i < rem; i++)
1833 ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i];
1834
1835 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1836 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1837 ctx->aes.ccm.kmac_param.k);
1838 }
1839
1840 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
1841 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
1842 &num, (ctr128_f)AES_ctr32_encrypt);
1843 } else {
1844 /* decrypt-then-mac */
1845 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
1846 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
1847 &num, (ctr128_f)AES_ctr32_encrypt);
1848
1849 if (len)
1850 s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1851 if (rem) {
1852 for (i = 0; i < rem; i++)
1853 ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i];
1854
1855 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1856 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1857 ctx->aes.ccm.kmac_param.k);
1858 }
1859 }
1860 /* encrypt tag */
1861 for (i = 15 - l; i < 16; i++)
1862 ctx->aes.ccm.nonce.b[i] = 0;
1863
1864 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc,
1865 ctx->aes.ccm.kmac_param.k);
1866 ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0];
1867 ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1];
1868
dd6b2706 1869 ctx->aes.ccm.nonce.b[0] = flags; /* restore flags field */
39f5b069
PS
1870 return 0;
1871}
1872
1873/*-
1874 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1875 * if successful. Otherwise -1 is returned.
1876 */
1877static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1878 const unsigned char *in, size_t len)
1879{
1880 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
9197c226 1881 unsigned char *ivec = ctx->iv;
39f5b069 1882 unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
ed576acd 1883 const int enc = EVP_CIPHER_CTX_is_encrypting(ctx);
39f5b069
PS
1884
1885 if (out != in
1886 || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m))
1887 return -1;
1888
1889 if (enc) {
1890 /* Set explicit iv (sequence number). */
1891 memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN);
1892 }
1893
1894 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
1895 /*-
1896 * Get explicit iv (sequence number). We already have fixed iv
1897 * (server/client_write_iv) here.
1898 */
1899 memcpy(ivec + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN);
1900 s390x_aes_ccm_setiv(cctx, ivec, len);
1901
1902 /* Process aad (sequence number|type|version|length) */
1903 s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len);
1904
1905 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
1906 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
96530eea 1907
39f5b069
PS
1908 if (enc) {
1909 if (s390x_aes_ccm(cctx, in, out, len, enc))
1910 return -1;
1911
1912 memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
1913 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
1914 } else {
1915 if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
1916 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len,
1917 cctx->aes.ccm.m))
1918 return len;
1919 }
1920
1921 OPENSSL_cleanse(out, len);
1922 return -1;
1923 }
1924}
1925
1926/*-
1927 * Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is
1928 * returned.
1929 */
96530eea
PS
1930static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
1931 const unsigned char *key,
39f5b069
PS
1932 const unsigned char *iv, int enc)
1933{
1934 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
39f5b069
PS
1935 int keylen;
1936
1937 if (iv == NULL && key == NULL)
1938 return 1;
1939
1940 if (key != NULL) {
ed576acd 1941 keylen = EVP_CIPHER_CTX_get_key_length(ctx);
8eb399fb 1942 cctx->aes.ccm.fc = S390X_AES_FC(keylen);
39f5b069
PS
1943 memcpy(cctx->aes.ccm.kmac_param.k, key, keylen);
1944
1945 /* Store encoded m and l. */
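        /*-
         * Layout of this flags byte (per RFC 3610): bits 0-2 carry
         * L' = L - 1 (size of the length field) and bits 3-5 carry
         * M' = (M - 2) / 2 (tag length); bit 6 (Adata) is only set
         * later, when AAD is processed.
         */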
1946 cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7)
1947 | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3;
1948 memset(cctx->aes.ccm.nonce.b + 1, 0,
1949 sizeof(cctx->aes.ccm.nonce.b));
1950 cctx->aes.ccm.blocks = 0;
1951
1952 cctx->aes.ccm.key_set = 1;
1953 }
1954
1955 if (iv != NULL) {
9197c226 1956 memcpy(ctx->iv, iv, 15 - cctx->aes.ccm.l);
39f5b069
PS
1957
1958 cctx->aes.ccm.iv_set = 1;
1959 }
1960
1961 return 1;
1962}
1963
1964/*-
1965 * Called from EVP layer to initialize context, process additional
1966 * authenticated data, en/de-crypt plain/cipher-text and authenticate
1967 * plaintext or process a TLS packet, depending on context. Returns bytes
1968 * written on success. Otherwise -1 is returned.
1969 */
96530eea 1970static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
39f5b069
PS
1971 const unsigned char *in, size_t len)
1972{
1973 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
ed576acd 1974 const int enc = EVP_CIPHER_CTX_is_encrypting(ctx);
39f5b069 1975 int rv;
9197c226 1976 unsigned char *buf;
39f5b069
PS
1977
1978 if (!cctx->aes.ccm.key_set)
1979 return -1;
1980
1981 if (cctx->aes.ccm.tls_aad_len >= 0)
1982 return s390x_aes_ccm_tls_cipher(ctx, out, in, len);
1983
1984 /*-
 1985 * Final(): Does not return any data. Recall that CCM is MAC-then-encrypt,
 1986 * so integrity must already be checked at Update(), i.e. before
 1987 * potentially corrupted data is output.
1988 */
1989 if (in == NULL && out != NULL)
1990 return 0;
1991
1992 if (!cctx->aes.ccm.iv_set)
1993 return -1;
1994
39f5b069
PS
1995 if (out == NULL) {
1996 /* Update(): Pass message length. */
1997 if (in == NULL) {
9197c226 1998 s390x_aes_ccm_setiv(cctx, ctx->iv, len);
39f5b069
PS
1999
2000 cctx->aes.ccm.len_set = 1;
2001 return len;
2002 }
2003
2004 /* Update(): Process aad. */
2005 if (!cctx->aes.ccm.len_set && len)
2006 return -1;
2007
2008 s390x_aes_ccm_aad(cctx, in, len);
2009 return len;
2010 }
2011
887e22dd
PS
2012 /* The tag must be set before actually decrypting data */
2013 if (!enc && !cctx->aes.ccm.tag_set)
2014 return -1;
2015
39f5b069
PS
2016 /* Update(): Process message. */
2017
2018 if (!cctx->aes.ccm.len_set) {
2019 /*-
46d08509 2020 * In case message length was not previously set explicitly via
39f5b069
PS
2021 * Update(), set it now.
2022 */
9197c226 2023 s390x_aes_ccm_setiv(cctx, ctx->iv, len);
39f5b069
PS
2024
2025 cctx->aes.ccm.len_set = 1;
2026 }
2027
2028 if (enc) {
2029 if (s390x_aes_ccm(cctx, in, out, len, enc))
2030 return -1;
2031
2032 cctx->aes.ccm.tag_set = 1;
2033 return len;
2034 } else {
2035 rv = -1;
2036
2037 if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
2038 buf = EVP_CIPHER_CTX_buf_noconst(ctx);
2039 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf,
2040 cctx->aes.ccm.m))
2041 rv = len;
2042 }
2043
2044 if (rv == -1)
2045 OPENSSL_cleanse(out, len);
2046
2047 cctx->aes.ccm.iv_set = 0;
2048 cctx->aes.ccm.tag_set = 0;
2049 cctx->aes.ccm.len_set = 0;
2050 return rv;
2051 }
2052}
2053
2054/*-
2055 * Performs various operations on the context structure depending on control
2056 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
2057 * Code is big-endian.
2058 */
2059static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2060{
2061 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c);
9197c226 2062 unsigned char *buf;
39f5b069
PS
2063 int enc, len;
2064
2065 switch (type) {
2066 case EVP_CTRL_INIT:
2067 cctx->aes.ccm.key_set = 0;
2068 cctx->aes.ccm.iv_set = 0;
2069 cctx->aes.ccm.l = 8;
2070 cctx->aes.ccm.m = 12;
2071 cctx->aes.ccm.tag_set = 0;
2072 cctx->aes.ccm.len_set = 0;
2073 cctx->aes.ccm.tls_aad_len = -1;
2074 return 1;
2075
7dddf2fc
SL
2076 case EVP_CTRL_GET_IVLEN:
2077 *(int *)ptr = 15 - cctx->aes.ccm.l;
2078 return 1;
2079
39f5b069
PS
2080 case EVP_CTRL_AEAD_TLS1_AAD:
2081 if (arg != EVP_AEAD_TLS1_AAD_LEN)
2082 return 0;
2083
2084 /* Save the aad for later use. */
2085 buf = EVP_CIPHER_CTX_buf_noconst(c);
2086 memcpy(buf, ptr, arg);
2087 cctx->aes.ccm.tls_aad_len = arg;
2088
03a5e5ae 2089 len = buf[arg - 2] << 8 | buf[arg - 1];
39f5b069
PS
2090 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
2091 return 0;
2092
2093 /* Correct length for explicit iv. */
2094 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
2095
ed576acd 2096 enc = EVP_CIPHER_CTX_is_encrypting(c);
39f5b069
PS
2097 if (!enc) {
2098 if (len < cctx->aes.ccm.m)
2099 return 0;
2100
2101 /* Correct length for tag. */
2102 len -= cctx->aes.ccm.m;
2103 }
2104
03a5e5ae
PS
2105 buf[arg - 2] = len >> 8;
2106 buf[arg - 1] = len & 0xff;
2107
39f5b069
PS
2108 /* Extra padding: tag appended to record. */
2109 return cctx->aes.ccm.m;
2110
2111 case EVP_CTRL_CCM_SET_IV_FIXED:
2112 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
2113 return 0;
2114
2115 /* Copy to first part of the iv. */
9197c226 2116 memcpy(c->iv, ptr, arg);
39f5b069
PS
2117 return 1;
2118
2119 case EVP_CTRL_AEAD_SET_IVLEN:
2120 arg = 15 - arg;
2121 /* fall-through */
2122
2123 case EVP_CTRL_CCM_SET_L:
2124 if (arg < 2 || arg > 8)
2125 return 0;
2126
2127 cctx->aes.ccm.l = arg;
2128 return 1;
2129
2130 case EVP_CTRL_AEAD_SET_TAG:
2131 if ((arg & 1) || arg < 4 || arg > 16)
2132 return 0;
2133
ed576acd 2134 enc = EVP_CIPHER_CTX_is_encrypting(c);
39f5b069
PS
2135 if (enc && ptr)
2136 return 0;
2137
2138 if (ptr) {
2139 cctx->aes.ccm.tag_set = 1;
2140 buf = EVP_CIPHER_CTX_buf_noconst(c);
2141 memcpy(buf, ptr, arg);
2142 }
2143
2144 cctx->aes.ccm.m = arg;
2145 return 1;
2146
2147 case EVP_CTRL_AEAD_GET_TAG:
ed576acd 2148 enc = EVP_CIPHER_CTX_is_encrypting(c);
39f5b069
PS
2149 if (!enc || !cctx->aes.ccm.tag_set)
2150 return 0;
2151
1287dabd 2152 if (arg < cctx->aes.ccm.m)
39f5b069
PS
2153 return 0;
2154
2155 memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
2156 cctx->aes.ccm.tag_set = 0;
2157 cctx->aes.ccm.iv_set = 0;
2158 cctx->aes.ccm.len_set = 0;
2159 return 1;
2160
2161 case EVP_CTRL_COPY:
2162 return 1;
2163
2164 default:
2165 return -1;
2166 }
2167}
2168
96530eea
PS
2169# define s390x_aes_ccm_cleanup aes_ccm_cleanup
2170
2171# ifndef OPENSSL_NO_OCB
dd6b2706 2172# define S390X_AES_OCB_CTX EVP_AES_OCB_CTX
96530eea
PS
2173
2174# define s390x_aes_ocb_init_key aes_ocb_init_key
2175static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2176 const unsigned char *iv, int enc);
2177# define s390x_aes_ocb_cipher aes_ocb_cipher
2178static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2179 const unsigned char *in, size_t len);
2180# define s390x_aes_ocb_cleanup aes_ocb_cleanup
2181static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *);
2182# define s390x_aes_ocb_ctrl aes_ocb_ctrl
2183static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
2184# endif
2185
e74be3d4
RL
2186# ifndef OPENSSL_NO_SIV
2187# define S390X_AES_SIV_CTX EVP_AES_SIV_CTX
e74be3d4
RL
2188
2189# define s390x_aes_siv_init_key aes_siv_init_key
2190# define s390x_aes_siv_cipher aes_siv_cipher
2191# define s390x_aes_siv_cleanup aes_siv_cleanup
2192# define s390x_aes_siv_ctrl aes_siv_ctrl
2193# endif
2194
dd6b2706
P
2195# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode, \
2196 MODE,flags) \
2197static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2198 nid##_##keylen##_##nmode,blocksize, \
2199 keylen / 8, \
2200 ivlen, \
2201 flags | EVP_CIPH_##MODE##_MODE, \
c39352e4 2202 EVP_ORIG_GLOBAL, \
dd6b2706
P
2203 s390x_aes_##mode##_init_key, \
2204 s390x_aes_##mode##_cipher, \
2205 NULL, \
2206 sizeof(S390X_AES_##MODE##_CTX), \
2207 NULL, \
2208 NULL, \
2209 NULL, \
2210 NULL \
2211}; \
2212static const EVP_CIPHER aes_##keylen##_##mode = { \
2213 nid##_##keylen##_##nmode, \
2214 blocksize, \
2215 keylen / 8, \
2216 ivlen, \
2217 flags | EVP_CIPH_##MODE##_MODE, \
c39352e4 2218 EVP_ORIG_GLOBAL, \
dd6b2706
P
2219 aes_init_key, \
2220 aes_##mode##_cipher, \
2221 NULL, \
2222 sizeof(EVP_AES_KEY), \
2223 NULL, \
2224 NULL, \
2225 NULL, \
2226 NULL \
2227}; \
2228const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2229{ \
2230 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2231 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
96530eea
PS
2232}
2233
2234# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\
dd6b2706
P
2235static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2236 nid##_##keylen##_##mode, \
2237 blocksize, \
2238 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
2239 ivlen, \
2240 flags | EVP_CIPH_##MODE##_MODE, \
c39352e4 2241 EVP_ORIG_GLOBAL, \
dd6b2706
P
2242 s390x_aes_##mode##_init_key, \
2243 s390x_aes_##mode##_cipher, \
2244 s390x_aes_##mode##_cleanup, \
2245 sizeof(S390X_AES_##MODE##_CTX), \
2246 NULL, \
2247 NULL, \
2248 s390x_aes_##mode##_ctrl, \
2249 NULL \
2250}; \
2251static const EVP_CIPHER aes_##keylen##_##mode = { \
2252 nid##_##keylen##_##mode,blocksize, \
2253 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
2254 ivlen, \
2255 flags | EVP_CIPH_##MODE##_MODE, \
c39352e4 2256 EVP_ORIG_GLOBAL, \
dd6b2706
P
2257 aes_##mode##_init_key, \
2258 aes_##mode##_cipher, \
2259 aes_##mode##_cleanup, \
2260 sizeof(EVP_AES_##MODE##_CTX), \
2261 NULL, \
2262 NULL, \
2263 aes_##mode##_ctrl, \
2264 NULL \
2265}; \
2266const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2267{ \
2268 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2269 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
96530eea
PS
2270}
2271
5158c763 2272#else
17f121de 2273
5158c763 2274# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
17f121de 2275static const EVP_CIPHER aes_##keylen##_##mode = { \
0f113f3e
MC
2276 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
2277 flags|EVP_CIPH_##MODE##_MODE, \
f6c95e46 2278 EVP_ORIG_GLOBAL, \
0f113f3e
MC
2279 aes_init_key, \
2280 aes_##mode##_cipher, \
2281 NULL, \
2282 sizeof(EVP_AES_KEY), \
2283 NULL,NULL,NULL,NULL }; \
17f121de
AP
2284const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2285{ return &aes_##keylen##_##mode; }
d1fff483 2286
5158c763 2287# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
17f121de 2288static const EVP_CIPHER aes_##keylen##_##mode = { \
0f113f3e 2289 nid##_##keylen##_##mode,blocksize, \
b1ceb439
TS
2290 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
2291 ivlen, \
0f113f3e 2292 flags|EVP_CIPH_##MODE##_MODE, \
f6c95e46 2293 EVP_ORIG_GLOBAL, \
0f113f3e
MC
2294 aes_##mode##_init_key, \
2295 aes_##mode##_cipher, \
2296 aes_##mode##_cleanup, \
2297 sizeof(EVP_AES_##MODE##_CTX), \
2298 NULL,NULL,aes_##mode##_ctrl,NULL }; \
17f121de
AP
2299const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2300{ return &aes_##keylen##_##mode; }
9575d1a9 2301
5158c763 2302#endif
9575d1a9 2303
5158c763 2304#define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \
0f113f3e
MC
2305 BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2306 BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2307 BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2308 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2309 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags) \
2310 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags) \
2311 BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
d1fff483
AP
2312
2313static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
2314 const unsigned char *iv, int enc)
2315{
2316 int ret, mode;
6435f0f6 2317 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
0f113f3e 2318
ed576acd 2319 mode = EVP_CIPHER_CTX_get_mode(ctx);
0f113f3e 2320 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
c01a3c6d 2321 && !enc) {
5158c763 2322#ifdef HWAES_CAPABLE
0f113f3e 2323 if (HWAES_CAPABLE) {
6435f0f6 2324 ret = HWAES_set_decrypt_key(key,
ed576acd 2325 EVP_CIPHER_CTX_get_key_length(ctx) * 8,
6435f0f6 2326 &dat->ks.ks);
0f113f3e
MC
2327 dat->block = (block128_f) HWAES_decrypt;
2328 dat->stream.cbc = NULL;
5158c763 2329# ifdef HWAES_cbc_encrypt
0f113f3e
MC
2330 if (mode == EVP_CIPH_CBC_MODE)
2331 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
0f113f3e 2332# endif
5158c763
MC
2333 } else
2334#endif
2335#ifdef BSAES_CAPABLE
0f113f3e 2336 if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
ed576acd
TM
2337 ret = AES_set_decrypt_key(key,
2338 EVP_CIPHER_CTX_get_key_length(ctx) * 8,
6435f0f6 2339 &dat->ks.ks);
0f113f3e 2340 dat->block = (block128_f) AES_decrypt;
3675334e 2341 dat->stream.cbc = (cbc128_f) ossl_bsaes_cbc_encrypt;
0f113f3e 2342 } else
5158c763
MC
2343#endif
2344#ifdef VPAES_CAPABLE
0f113f3e 2345 if (VPAES_CAPABLE) {
6435f0f6 2346 ret = vpaes_set_decrypt_key(key,
ed576acd 2347 EVP_CIPHER_CTX_get_key_length(ctx) * 8,
6435f0f6 2348 &dat->ks.ks);
0f113f3e
MC
2349 dat->block = (block128_f) vpaes_decrypt;
2350 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2351 (cbc128_f) vpaes_cbc_encrypt : NULL;
2352 } else
5158c763 2353#endif
0f113f3e 2354 {
6435f0f6 2355 ret = AES_set_decrypt_key(key,
ed576acd 2356 EVP_CIPHER_CTX_get_key_length(ctx) * 8,
6435f0f6 2357 &dat->ks.ks);
0f113f3e
MC
2358 dat->block = (block128_f) AES_decrypt;
2359 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2360 (cbc128_f) AES_cbc_encrypt : NULL;
c01a3c6d 2361 }
0f113f3e 2362 } else
5158c763 2363#ifdef HWAES_CAPABLE
0f113f3e 2364 if (HWAES_CAPABLE) {
ed576acd
TM
2365 ret = HWAES_set_encrypt_key(key,
2366 EVP_CIPHER_CTX_get_key_length(ctx) * 8,
6435f0f6 2367 &dat->ks.ks);
0f113f3e
MC
2368 dat->block = (block128_f) HWAES_encrypt;
2369 dat->stream.cbc = NULL;
5158c763 2370# ifdef HWAES_cbc_encrypt
0f113f3e
MC
2371 if (mode == EVP_CIPH_CBC_MODE)
2372 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2373 else
5158c763
MC
2374# endif
2375# ifdef HWAES_ctr32_encrypt_blocks
0f113f3e
MC
2376 if (mode == EVP_CIPH_CTR_MODE)
2377 dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
2378 else
5158c763 2379# endif
0f113f3e
MC
2380 (void)0; /* terminate potentially open 'else' */
2381 } else
5158c763
MC
2382#endif
2383#ifdef BSAES_CAPABLE
0f113f3e 2384 if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
ed576acd 2385 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
6435f0f6 2386 &dat->ks.ks);
0f113f3e 2387 dat->block = (block128_f) AES_encrypt;
3675334e 2388 dat->stream.ctr = (ctr128_f) ossl_bsaes_ctr32_encrypt_blocks;
0f113f3e 2389 } else
5158c763
MC
2390#endif
2391#ifdef VPAES_CAPABLE
0f113f3e 2392 if (VPAES_CAPABLE) {
ed576acd
TM
2393 ret = vpaes_set_encrypt_key(key,
2394 EVP_CIPHER_CTX_get_key_length(ctx) * 8,
6435f0f6 2395 &dat->ks.ks);
0f113f3e
MC
2396 dat->block = (block128_f) vpaes_encrypt;
2397 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2398 (cbc128_f) vpaes_cbc_encrypt : NULL;
2399 } else
5158c763 2400#endif
0f113f3e 2401 {
ed576acd 2402 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
6435f0f6 2403 &dat->ks.ks);
0f113f3e
MC
2404 dat->block = (block128_f) AES_encrypt;
2405 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2406 (cbc128_f) AES_cbc_encrypt : NULL;
5158c763 2407#ifdef AES_CTR_ASM
0f113f3e
MC
2408 if (mode == EVP_CIPH_CTR_MODE)
2409 dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
5158c763 2410#endif
0f113f3e 2411 }
d1fff483 2412
0f113f3e 2413 if (ret < 0) {
9311d0c4 2414 ERR_raise(ERR_LIB_EVP, EVP_R_AES_KEY_SETUP_FAILED);
0f113f3e
MC
2415 return 0;
2416 }
d1fff483 2417
0f113f3e
MC
2418 return 1;
2419}
d1fff483 2420
0f113f3e
MC
2421static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2422 const unsigned char *in, size_t len)
17f121de 2423{
6435f0f6 2424 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
8ca28da0 2425
0f113f3e 2426 if (dat->stream.cbc)
9197c226 2427 (*dat->stream.cbc) (in, out, len, &dat->ks, ctx->iv,
ed576acd
TM
2428 EVP_CIPHER_CTX_is_encrypting(ctx));
2429 else if (EVP_CIPHER_CTX_is_encrypting(ctx))
9197c226
BK
2430 CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, ctx->iv,
2431 dat->block);
0f113f3e 2432 else
6435f0f6 2433 CRYPTO_cbc128_decrypt(in, out, len, &dat->ks,
9197c226 2434 ctx->iv, dat->block);
17f121de 2435
0f113f3e 2436 return 1;
17f121de
AP
2437}
2438
0f113f3e
MC
2439static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2440 const unsigned char *in, size_t len)
17f121de 2441{
ed576acd 2442 size_t bl = EVP_CIPHER_CTX_get_block_size(ctx);
0f113f3e 2443 size_t i;
6435f0f6 2444 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
17f121de 2445
0f113f3e
MC
2446 if (len < bl)
2447 return 1;
17f121de 2448
0f113f3e
MC
2449 for (i = 0, len -= bl; i <= len; i += bl)
2450 (*dat->block) (in + i, out + i, &dat->ks);
17f121de 2451
0f113f3e 2452 return 1;
17f121de 2453}
deb2c1a1 2454
0f113f3e
MC
2455static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2456 const unsigned char *in, size_t len)
17f121de 2457{
6435f0f6 2458 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
8ca28da0 2459
ed576acd 2460 int num = EVP_CIPHER_CTX_get_num(ctx);
0f113f3e 2461 CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
9197c226 2462 ctx->iv, &num, dat->block);
6435f0f6 2463 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e 2464 return 1;
17f121de 2465}
deb2c1a1 2466
0f113f3e
MC
2467static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2468 const unsigned char *in, size_t len)
17f121de 2469{
6435f0f6 2470 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
8ca28da0 2471
ed576acd 2472 int num = EVP_CIPHER_CTX_get_num(ctx);
0f113f3e 2473 CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
9197c226 2474 ctx->iv, &num,
ed576acd 2475 EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
6435f0f6 2476 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e 2477 return 1;
17f121de
AP
2478}
2479
0f113f3e
MC
2480static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2481 const unsigned char *in, size_t len)
17f121de 2482{
6435f0f6 2483 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
8ca28da0 2484
ed576acd 2485 int num = EVP_CIPHER_CTX_get_num(ctx);
0f113f3e 2486 CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
9197c226 2487 ctx->iv, &num,
ed576acd 2488 EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
6435f0f6 2489 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e 2490 return 1;
17f121de 2491}
8d1ebe0b 2492
0f113f3e
MC
2493static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2494 const unsigned char *in, size_t len)
17f121de 2495{
6435f0f6 2496 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
0f113f3e 2497
6435f0f6 2498 if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
ed576acd 2499 int num = EVP_CIPHER_CTX_get_num(ctx);
0f113f3e 2500 CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
9197c226 2501 ctx->iv, &num,
ed576acd 2502 EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
6435f0f6 2503 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e
MC
2504 return 1;
2505 }
2506
2507 while (len >= MAXBITCHUNK) {
ed576acd 2508 int num = EVP_CIPHER_CTX_get_num(ctx);
0f113f3e 2509 CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
9197c226 2510 ctx->iv, &num,
ed576acd 2511 EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
6435f0f6 2512 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e 2513 len -= MAXBITCHUNK;
604e591e
BE
2514 out += MAXBITCHUNK;
2515 in += MAXBITCHUNK;
0f113f3e 2516 }
6435f0f6 2517 if (len) {
ed576acd 2518 int num = EVP_CIPHER_CTX_get_num(ctx);
0f113f3e 2519 CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
9197c226 2520 ctx->iv, &num,
ed576acd 2521 EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
6435f0f6
RL
2522 EVP_CIPHER_CTX_set_num(ctx, num);
2523 }
0f113f3e
MC
2524
2525 return 1;
17f121de 2526}
8d1ebe0b 2527
0f113f3e
MC
2528static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2529 const unsigned char *in, size_t len)
d976f992 2530{
042f8f70
P
2531 int n = EVP_CIPHER_CTX_get_num(ctx);
2532 unsigned int num;
6435f0f6 2533 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
0f113f3e 2534
042f8f70
P
2535 if (n < 0)
2536 return 0;
2537 num = (unsigned int)n;
2538
0f113f3e
MC
2539 if (dat->stream.ctr)
2540 CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
9197c226 2541 ctx->iv,
6435f0f6
RL
2542 EVP_CIPHER_CTX_buf_noconst(ctx),
2543 &num, dat->stream.ctr);
0f113f3e
MC
2544 else
2545 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
9197c226 2546 ctx->iv,
6435f0f6
RL
2547 EVP_CIPHER_CTX_buf_noconst(ctx), &num,
2548 dat->block);
2549 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e 2550 return 1;
d976f992
AP
2551}
2552
0f113f3e
MC
2553BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
2554 BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
2555 BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
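/*-
 * Illustrative usage sketch (added for exposition, not part of the original
 * file): how one of the generic cipher tables generated above is consumed
 * through the public EVP API. The function and buffer names are hypothetical;
 * "out" is assumed to have room for msglen plus one block of padding and
 * error handling is reduced to a single failure path.
 */
#if 0
static int sketch_aes_128_cbc_encrypt(const unsigned char key[16],
                                      const unsigned char iv[16],
                                      const unsigned char *msg, int msglen,
                                      unsigned char *out, int *outlen)
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int len = 0, total = 0;

    if (c == NULL)
        return 0;
    if (EVP_EncryptInit_ex(c, EVP_aes_128_cbc(), NULL, key, iv) != 1
            || EVP_EncryptUpdate(c, out, &len, msg, msglen) != 1)
        goto err;
    total = len;
    if (EVP_EncryptFinal_ex(c, out + total, &len) != 1)
        goto err;
    *outlen = total + len;
    EVP_CIPHER_CTX_free(c);
    return 1;
 err:
    EVP_CIPHER_CTX_free(c);
    return 0;
}
#endif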
bdaa5415
DSH
2556
2557static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
0f113f3e 2558{
6435f0f6 2559 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
273a0218
BE
2560 if (gctx == NULL)
2561 return 0;
0f113f3e 2562 OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
9197c226 2563 if (gctx->iv != c->iv)
0f113f3e
MC
2564 OPENSSL_free(gctx->iv);
2565 return 1;
2566}
bdaa5415
DSH
2567
2568static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
0f113f3e 2569{
6435f0f6 2570 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
0f113f3e
MC
2571 switch (type) {
2572 case EVP_CTRL_INIT:
2573 gctx->key_set = 0;
2574 gctx->iv_set = 0;
ed576acd 2575 gctx->ivlen = EVP_CIPHER_get_iv_length(c->cipher);
c5307d9c 2576 gctx->iv = c->iv;
0f113f3e
MC
2577 gctx->taglen = -1;
2578 gctx->iv_gen = 0;
2579 gctx->tls_aad_len = -1;
2580 return 1;
2581
7dddf2fc
SL
2582 case EVP_CTRL_GET_IVLEN:
2583 *(int *)ptr = gctx->ivlen;
2584 return 1;
2585
e640fa02 2586 case EVP_CTRL_AEAD_SET_IVLEN:
0f113f3e
MC
2587 if (arg <= 0)
2588 return 0;
2589 /* Allocate memory for IV if needed */
2590 if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
c5307d9c 2591 if (gctx->iv != c->iv)
0f113f3e 2592 OPENSSL_free(gctx->iv);
cdb10bae 2593 if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
9311d0c4 2594 ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
0f113f3e 2595 return 0;
cdb10bae 2596 }
0f113f3e
MC
2597 }
2598 gctx->ivlen = arg;
2599 return 1;
2600
e640fa02 2601 case EVP_CTRL_AEAD_SET_TAG:
c5307d9c 2602 if (arg <= 0 || arg > 16 || c->encrypt)
0f113f3e 2603 return 0;
c5307d9c 2604 memcpy(c->buf, ptr, arg);
0f113f3e
MC
2605 gctx->taglen = arg;
2606 return 1;
2607
e640fa02 2608 case EVP_CTRL_AEAD_GET_TAG:
c5307d9c 2609 if (arg <= 0 || arg > 16 || !c->encrypt
6435f0f6 2610 || gctx->taglen < 0)
0f113f3e 2611 return 0;
c5307d9c 2612 memcpy(ptr, c->buf, arg);
0f113f3e
MC
2613 return 1;
2614
2615 case EVP_CTRL_GCM_SET_IV_FIXED:
2616 /* Special case: -1 length restores whole IV */
2617 if (arg == -1) {
2618 memcpy(gctx->iv, ptr, gctx->ivlen);
2619 gctx->iv_gen = 1;
2620 return 1;
2621 }
2622 /*
2623 * Fixed field must be at least 4 bytes and invocation field at least
2624 * 8.
2625 */
2626 if ((arg < 4) || (gctx->ivlen - arg) < 8)
2627 return 0;
2628 if (arg)
2629 memcpy(gctx->iv, ptr, arg);
c5307d9c 2630 if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
16cfc2c9 2631 return 0;
0f113f3e
MC
2632 gctx->iv_gen = 1;
2633 return 1;
2634
2635 case EVP_CTRL_GCM_IV_GEN:
2636 if (gctx->iv_gen == 0 || gctx->key_set == 0)
2637 return 0;
2638 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2639 if (arg <= 0 || arg > gctx->ivlen)
2640 arg = gctx->ivlen;
2641 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
2642 /*
2643 * Invocation field will be at least 8 bytes in size and so no need
2644 * to check wrap around or increment more than last 8 bytes.
2645 */
2646 ctr64_inc(gctx->iv + gctx->ivlen - 8);
2647 gctx->iv_set = 1;
2648 return 1;
2649
2650 case EVP_CTRL_GCM_SET_IV_INV:
c5307d9c 2651 if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
0f113f3e
MC
2652 return 0;
2653 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
2654 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2655 gctx->iv_set = 1;
2656 return 1;
2657
2658 case EVP_CTRL_AEAD_TLS1_AAD:
2659 /* Save the AAD for later use */
c8269881 2660 if (arg != EVP_AEAD_TLS1_AAD_LEN)
0f113f3e 2661 return 0;
c5307d9c 2662 memcpy(c->buf, ptr, arg);
0f113f3e 2663 gctx->tls_aad_len = arg;
d6b34570 2664 gctx->tls_enc_records = 0;
0f113f3e 2665 {
c5307d9c 2666 unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
0f113f3e 2667 /* Correct length for explicit IV */
2198b3a5
AP
2668 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
2669 return 0;
0f113f3e
MC
2670 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
2671 /* If decrypting correct for tag too */
c5307d9c 2672 if (!c->encrypt) {
2198b3a5
AP
2673 if (len < EVP_GCM_TLS_TAG_LEN)
2674 return 0;
0f113f3e 2675 len -= EVP_GCM_TLS_TAG_LEN;
2198b3a5 2676 }
c5307d9c
AP
2677 c->buf[arg - 2] = len >> 8;
2678 c->buf[arg - 1] = len & 0xff;
0f113f3e
MC
2679 }
2680 /* Extra padding: tag appended to record */
2681 return EVP_GCM_TLS_TAG_LEN;
2682
2683 case EVP_CTRL_COPY:
2684 {
2685 EVP_CIPHER_CTX *out = ptr;
6435f0f6 2686 EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out);
0f113f3e
MC
2687 if (gctx->gcm.key) {
2688 if (gctx->gcm.key != &gctx->ks)
2689 return 0;
2690 gctx_out->gcm.key = &gctx_out->ks;
2691 }
c5307d9c
AP
2692 if (gctx->iv == c->iv)
2693 gctx_out->iv = out->iv;
0f113f3e 2694 else {
cdb10bae 2695 if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
9311d0c4 2696 ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
0f113f3e 2697 return 0;
cdb10bae 2698 }
0f113f3e
MC
2699 memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
2700 }
2701 return 1;
2702 }
2703
2704 default:
2705 return -1;
2706
2707 }
2708}
bdaa5415
DSH
2709
2710static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
2711 const unsigned char *iv, int enc)
2712{
6435f0f6 2713 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
0f113f3e
MC
2714 if (!iv && !key)
2715 return 1;
2716 if (key) {
2717 do {
5158c763 2718#ifdef HWAES_CAPABLE
0f113f3e 2719 if (HWAES_CAPABLE) {
c5307d9c 2720 HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
0f113f3e
MC
2721 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2722 (block128_f) HWAES_encrypt);
5158c763 2723# ifdef HWAES_ctr32_encrypt_blocks
0f113f3e 2724 gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
5158c763 2725# else
0f113f3e 2726 gctx->ctr = NULL;
5158c763 2727# endif
0f113f3e
MC
2728 break;
2729 } else
5158c763
MC
2730#endif
2731#ifdef BSAES_CAPABLE
0f113f3e 2732 if (BSAES_CAPABLE) {
c5307d9c 2733 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
0f113f3e
MC
2734 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2735 (block128_f) AES_encrypt);
3675334e 2736 gctx->ctr = (ctr128_f) ossl_bsaes_ctr32_encrypt_blocks;
0f113f3e
MC
2737 break;
2738 } else
5158c763
MC
2739#endif
2740#ifdef VPAES_CAPABLE
0f113f3e 2741 if (VPAES_CAPABLE) {
c5307d9c 2742 vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
0f113f3e
MC
2743 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2744 (block128_f) vpaes_encrypt);
2745 gctx->ctr = NULL;
2746 break;
2747 } else
5158c763 2748#endif
0f113f3e
MC
2749 (void)0; /* terminate potentially open 'else' */
2750
c5307d9c 2751 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
0f113f3e
MC
2752 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2753 (block128_f) AES_encrypt);
5158c763 2754#ifdef AES_CTR_ASM
0f113f3e 2755 gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
5158c763 2756#else
0f113f3e 2757 gctx->ctr = NULL;
5158c763 2758#endif
0f113f3e
MC
2759 } while (0);
2760
2761 /*
 2762 * If we have an IV we can set it directly, otherwise use the saved IV.
2763 */
2764 if (iv == NULL && gctx->iv_set)
2765 iv = gctx->iv;
2766 if (iv) {
2767 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
2768 gctx->iv_set = 1;
2769 }
2770 gctx->key_set = 1;
2771 } else {
2772 /* If key set use IV, otherwise copy */
2773 if (gctx->key_set)
2774 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
2775 else
2776 memcpy(gctx->iv, iv, gctx->ivlen);
2777 gctx->iv_set = 1;
2778 gctx->iv_gen = 0;
2779 }
2780 return 1;
2781}
2782
2783/*
2784 * Handle TLS GCM packet format. This consists of the last portion of the IV
28dd49fa
DSH
2785 * followed by the payload and finally the tag. On encrypt generate IV,
2786 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload
2787 * and verify tag.
2788 */
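/*-
 * Illustrative layout note (restating the constants used below, no new
 * behaviour): the processed TLS record fragment is
 *
 *     explicit IV (EVP_GCM_TLS_EXPLICIT_IV_LEN = 8 bytes)
 *  || encrypted payload
 *  || tag (EVP_GCM_TLS_TAG_LEN = 16 bytes)
 *
 * and the full GCM nonce is the 4-byte fixed part installed via
 * EVP_CTRL_GCM_SET_IV_FIXED followed by the 8-byte explicit part.
 */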
2789
2790static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e
MC
2791 const unsigned char *in, size_t len)
2792{
6435f0f6 2793 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
0f113f3e
MC
2794 int rv = -1;
2795 /* Encrypt/decrypt must be performed in place */
2796 if (out != in
2797 || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
2798 return -1;
df443918 2799
d6b34570
P
2800 /*
2801 * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
 2802 * Requirements from SP 800-38D". The requirement is for one party to the
2803 * communication to fail after 2^64 - 1 keys. We do this on the encrypting
2804 * side only.
2805 */
2806 if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
9311d0c4 2807 ERR_raise(ERR_LIB_EVP, EVP_R_TOO_MANY_RECORDS);
d6b34570
P
2808 goto err;
2809 }
2810
0f113f3e
MC
2811 /*
2812 * Set IV from start of buffer or generate IV and write to start of
2813 * buffer.
2814 */
c5307d9c
AP
2815 if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ? EVP_CTRL_GCM_IV_GEN
2816 : EVP_CTRL_GCM_SET_IV_INV,
0f113f3e
MC
2817 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
2818 goto err;
2819 /* Use saved AAD */
c5307d9c 2820 if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
0f113f3e
MC
2821 goto err;
2822 /* Fix buffer and length to point to payload */
2823 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
2824 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
2825 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
c5307d9c 2826 if (ctx->encrypt) {
0f113f3e
MC
2827 /* Encrypt payload */
2828 if (gctx->ctr) {
2829 size_t bulk = 0;
5158c763 2830#if defined(AES_GCM_ASM)
0f113f3e
MC
2831 if (len >= 32 && AES_GCM_ASM(gctx)) {
2832 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
2833 return -1;
2834
2835 bulk = AES_gcm_encrypt(in, out, len,
2836 gctx->gcm.key,
2837 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2838 gctx->gcm.len.u[1] += bulk;
2839 }
5158c763 2840#endif
0f113f3e
MC
2841 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
2842 in + bulk,
2843 out + bulk,
2844 len - bulk, gctx->ctr))
2845 goto err;
2846 } else {
2847 size_t bulk = 0;
5158c763 2848#if defined(AES_GCM_ASM2)
0f113f3e
MC
2849 if (len >= 32 && AES_GCM_ASM2(gctx)) {
2850 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
2851 return -1;
2852
2853 bulk = AES_gcm_encrypt(in, out, len,
2854 gctx->gcm.key,
2855 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2856 gctx->gcm.len.u[1] += bulk;
2857 }
5158c763 2858#endif
0f113f3e
MC
2859 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
2860 in + bulk, out + bulk, len - bulk))
2861 goto err;
2862 }
2863 out += len;
2864 /* Finally write tag */
2865 CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
2866 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
2867 } else {
2868 /* Decrypt */
2869 if (gctx->ctr) {
2870 size_t bulk = 0;
5158c763 2871#if defined(AES_GCM_ASM)
0f113f3e
MC
2872 if (len >= 16 && AES_GCM_ASM(gctx)) {
2873 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
2874 return -1;
2875
2876 bulk = AES_gcm_decrypt(in, out, len,
2877 gctx->gcm.key,
2878 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2879 gctx->gcm.len.u[1] += bulk;
2880 }
5158c763 2881#endif
0f113f3e
MC
2882 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
2883 in + bulk,
2884 out + bulk,
2885 len - bulk, gctx->ctr))
2886 goto err;
2887 } else {
2888 size_t bulk = 0;
5158c763 2889#if defined(AES_GCM_ASM2)
0f113f3e
MC
2890 if (len >= 16 && AES_GCM_ASM2(gctx)) {
2891 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
2892 return -1;
2893
2894 bulk = AES_gcm_decrypt(in, out, len,
2895 gctx->gcm.key,
2896 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2897 gctx->gcm.len.u[1] += bulk;
2898 }
5158c763 2899#endif
0f113f3e
MC
2900 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
2901 in + bulk, out + bulk, len - bulk))
2902 goto err;
2903 }
2904 /* Retrieve tag */
c5307d9c 2905 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
0f113f3e 2906 /* If tag mismatch wipe buffer */
c5307d9c 2907 if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
0f113f3e
MC
2908 OPENSSL_cleanse(out, len);
2909 goto err;
2910 }
2911 rv = len;
2912 }
2913
2914 err:
2915 gctx->iv_set = 0;
2916 gctx->tls_aad_len = -1;
2917 return rv;
2918}
28dd49fa 2919
f844f9eb 2920#ifdef FIPS_MODULE
bcf082d1
SL
2921/*
 2922 * See SP800-38D (GCM) Section 8 "Uniqueness requirement on IVs and keys"
2923 *
2924 * See also 8.2.2 RBG-based construction.
2925 * Random construction consists of a free field (which can be NULL) and a
2926 * random field which will use a DRBG that can return at least 96 bits of
2927 * entropy strength. (The DRBG must be seeded by the FIPS module).
2928 */
2929static int aes_gcm_iv_generate(EVP_AES_GCM_CTX *gctx, int offset)
2930{
2931 int sz = gctx->ivlen - offset;
2932
2933 /* Must be at least 96 bits */
2934 if (sz <= 0 || gctx->ivlen < 12)
2935 return 0;
2936
2937 /* Use DRBG to generate random iv */
2938 if (RAND_bytes(gctx->iv + offset, sz) <= 0)
2939 return 0;
2940 return 1;
2941}
f844f9eb 2942#endif /* FIPS_MODULE */
bcf082d1 2943
17f121de 2944static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e
MC
2945 const unsigned char *in, size_t len)
2946{
6435f0f6 2947 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
bcf082d1 2948
0f113f3e
MC
2949 /* If not set up, return error */
2950 if (!gctx->key_set)
2951 return -1;
2952
2953 if (gctx->tls_aad_len >= 0)
2954 return aes_gcm_tls_cipher(ctx, out, in, len);
2955
f844f9eb 2956#ifdef FIPS_MODULE
bcf082d1
SL
2957 /*
 2958 * FIPS requires generation of AES-GCM IVs inside the FIPS module.
2959 * The IV can still be set externally (the security policy will state that
2960 * this is not FIPS compliant). There are some applications
2961 * where setting the IV externally is the only option available.
2962 */
2963 if (!gctx->iv_set) {
2964 if (!ctx->encrypt || !aes_gcm_iv_generate(gctx, 0))
2965 return -1;
2966 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2967 gctx->iv_set = 1;
2968 gctx->iv_gen_rand = 1;
2969 }
2970#else
0f113f3e
MC
2971 if (!gctx->iv_set)
2972 return -1;
f844f9eb 2973#endif /* FIPS_MODULE */
bcf082d1 2974
0f113f3e
MC
2975 if (in) {
2976 if (out == NULL) {
2977 if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
2978 return -1;
c5307d9c 2979 } else if (ctx->encrypt) {
0f113f3e
MC
2980 if (gctx->ctr) {
2981 size_t bulk = 0;
5158c763 2982#if defined(AES_GCM_ASM)
0f113f3e
MC
2983 if (len >= 32 && AES_GCM_ASM(gctx)) {
2984 size_t res = (16 - gctx->gcm.mres) % 16;
2985
2986 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
2987 return -1;
2988
2989 bulk = AES_gcm_encrypt(in + res,
2990 out + res, len - res,
2991 gctx->gcm.key, gctx->gcm.Yi.c,
2992 gctx->gcm.Xi.u);
2993 gctx->gcm.len.u[1] += bulk;
2994 bulk += res;
2995 }
5158c763 2996#endif
0f113f3e
MC
2997 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
2998 in + bulk,
2999 out + bulk,
3000 len - bulk, gctx->ctr))
3001 return -1;
3002 } else {
3003 size_t bulk = 0;
5158c763 3004#if defined(AES_GCM_ASM2)
0f113f3e
MC
3005 if (len >= 32 && AES_GCM_ASM2(gctx)) {
3006 size_t res = (16 - gctx->gcm.mres) % 16;
3007
3008 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
3009 return -1;
3010
3011 bulk = AES_gcm_encrypt(in + res,
3012 out + res, len - res,
3013 gctx->gcm.key, gctx->gcm.Yi.c,
3014 gctx->gcm.Xi.u);
3015 gctx->gcm.len.u[1] += bulk;
3016 bulk += res;
3017 }
5158c763 3018#endif
0f113f3e
MC
3019 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
3020 in + bulk, out + bulk, len - bulk))
3021 return -1;
3022 }
3023 } else {
3024 if (gctx->ctr) {
3025 size_t bulk = 0;
5158c763 3026#if defined(AES_GCM_ASM)
0f113f3e
MC
3027 if (len >= 16 && AES_GCM_ASM(gctx)) {
3028 size_t res = (16 - gctx->gcm.mres) % 16;
3029
3030 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3031 return -1;
3032
3033 bulk = AES_gcm_decrypt(in + res,
3034 out + res, len - res,
3035 gctx->gcm.key,
3036 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3037 gctx->gcm.len.u[1] += bulk;
3038 bulk += res;
3039 }
5158c763 3040#endif
0f113f3e
MC
3041 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
3042 in + bulk,
3043 out + bulk,
3044 len - bulk, gctx->ctr))
3045 return -1;
3046 } else {
3047 size_t bulk = 0;
5158c763 3048#if defined(AES_GCM_ASM2)
0f113f3e
MC
3049 if (len >= 16 && AES_GCM_ASM2(gctx)) {
3050 size_t res = (16 - gctx->gcm.mres) % 16;
3051
3052 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3053 return -1;
3054
3055 bulk = AES_gcm_decrypt(in + res,
3056 out + res, len - res,
3057 gctx->gcm.key,
3058 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3059 gctx->gcm.len.u[1] += bulk;
3060 bulk += res;
3061 }
5158c763 3062#endif
0f113f3e
MC
3063 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
3064 in + bulk, out + bulk, len - bulk))
3065 return -1;
3066 }
3067 }
3068 return len;
3069 } else {
c5307d9c 3070 if (!ctx->encrypt) {
0f113f3e
MC
3071 if (gctx->taglen < 0)
3072 return -1;
c5307d9c 3073 if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
0f113f3e
MC
3074 return -1;
3075 gctx->iv_set = 0;
3076 return 0;
3077 }
c5307d9c 3078 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
0f113f3e
MC
3079 gctx->taglen = 16;
3080 /* Don't reuse the IV */
3081 gctx->iv_set = 0;
3082 return 0;
3083 }
3084
3085}
3086
5158c763 3087#define CUSTOM_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
0f113f3e
MC
3088 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3089 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
7dddf2fc 3090 | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_CUSTOM_IV_LENGTH)
0f113f3e
MC
3091
3092BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
3093 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3094 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
3095 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3096 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
3097 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
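/*-
 * Minimal GCM usage sketch (added for exposition, not part of the original
 * file): exercises the ctrl commands handled by aes_gcm_ctrl() above through
 * the public EVP API. The function name is hypothetical and error handling
 * is reduced to a single failure path; not intended for production use.
 */
#if 0
static int sketch_aes_256_gcm_seal(const unsigned char key[32],
                                   const unsigned char iv[12],
                                   const unsigned char *aad, int aadlen,
                                   const unsigned char *pt, int ptlen,
                                   unsigned char *ct, unsigned char tag[16])
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int len = 0, ok = 0;

    if (c == NULL)
        return 0;
    if (EVP_EncryptInit_ex(c, EVP_aes_256_gcm(), NULL, NULL, NULL) != 1
            || EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL) != 1
            || EVP_EncryptInit_ex(c, NULL, NULL, key, iv) != 1
            || EVP_EncryptUpdate(c, NULL, &len, aad, aadlen) != 1 /* AAD only */
            || EVP_EncryptUpdate(c, ct, &len, pt, ptlen) != 1
            || EVP_EncryptFinal_ex(c, ct + len, &len) != 1
            || EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag) != 1)
        goto end;
    ok = 1;
 end:
    EVP_CIPHER_CTX_free(c);
    return ok;
}
#endif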
32a2d8dd
DSH
3098
3099static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
0f113f3e 3100{
2c840201
P
3101 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, c);
3102
0f113f3e
MC
3103 if (type == EVP_CTRL_COPY) {
3104 EVP_CIPHER_CTX *out = ptr;
6435f0f6 3105 EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out);
2c840201 3106
0f113f3e
MC
3107 if (xctx->xts.key1) {
3108 if (xctx->xts.key1 != &xctx->ks1)
3109 return 0;
3110 xctx_out->xts.key1 = &xctx_out->ks1;
3111 }
3112 if (xctx->xts.key2) {
3113 if (xctx->xts.key2 != &xctx->ks2)
3114 return 0;
3115 xctx_out->xts.key2 = &xctx_out->ks2;
3116 }
3117 return 1;
3118 } else if (type != EVP_CTRL_INIT)
3119 return -1;
 3120 /* key1 and key2 are used as an indicator that both key and IV are set */
3121 xctx->xts.key1 = NULL;
3122 xctx->xts.key2 = NULL;
3123 return 1;
3124}
32a2d8dd
DSH
3125
3126static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
3127 const unsigned char *iv, int enc)
3128{
6435f0f6 3129 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
2c840201 3130
0f113f3e
MC
3131 if (!iv && !key)
3132 return 1;
3133
3538b0f7 3134 if (key) {
0f113f3e 3135 do {
3538b0f7 3136 /* The key is two half length keys in reality */
ed576acd 3137 const int bytes = EVP_CIPHER_CTX_get_key_length(ctx) / 2;
3538b0f7
P
3138 const int bits = bytes * 8;
3139
3140 /*
3141 * Verify that the two keys are different.
3142 *
3143 * This addresses the vulnerability described in Rogaway's
3144 * September 2004 paper:
3145 *
3146 * "Efficient Instantiations of Tweakable Blockciphers and
3147 * Refinements to Modes OCB and PMAC".
3148 * (http://web.cs.ucdavis.edu/~rogaway/papers/offsets.pdf)
3149 *
3150 * FIPS 140-2 IG A.9 XTS-AES Key Generation Requirements states
3151 * that:
3152 * "The check for Key_1 != Key_2 shall be done at any place
3153 * BEFORE using the keys in the XTS-AES algorithm to process
3154 * data with them."
3155 */
2c840201
P
3156 if ((!allow_insecure_decrypt || enc)
3157 && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
9311d0c4 3158 ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DUPLICATED_KEYS);
3538b0f7
P
3159 return 0;
3160 }
3161
5158c763 3162#ifdef AES_XTS_ASM
0f113f3e 3163 xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
5158c763 3164#else
0f113f3e 3165 xctx->stream = NULL;
5158c763 3166#endif
0f113f3e 3167 /* key_len is two AES keys */
5158c763 3168#ifdef HWAES_CAPABLE
0f113f3e
MC
3169 if (HWAES_CAPABLE) {
3170 if (enc) {
3538b0f7 3171 HWAES_set_encrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e 3172 xctx->xts.block1 = (block128_f) HWAES_encrypt;
46f047d7
AP
3173# ifdef HWAES_xts_encrypt
3174 xctx->stream = HWAES_xts_encrypt;
3175# endif
0f113f3e 3176 } else {
3538b0f7 3177 HWAES_set_decrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e 3178 xctx->xts.block1 = (block128_f) HWAES_decrypt;
46f047d7
AP
3179# ifdef HWAES_xts_decrypt
3180 xctx->stream = HWAES_xts_decrypt;
 3181# endif
0f113f3e
MC
3182 }
3183
3538b0f7 3184 HWAES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
0f113f3e
MC
3185 xctx->xts.block2 = (block128_f) HWAES_encrypt;
3186
3187 xctx->xts.key1 = &xctx->ks1;
3188 break;
3189 } else
5158c763
MC
3190#endif
3191#ifdef BSAES_CAPABLE
0f113f3e 3192 if (BSAES_CAPABLE)
3675334e 3193 xctx->stream = enc ? ossl_bsaes_xts_encrypt : ossl_bsaes_xts_decrypt;
0f113f3e 3194 else
5158c763
MC
3195#endif
3196#ifdef VPAES_CAPABLE
0f113f3e
MC
3197 if (VPAES_CAPABLE) {
3198 if (enc) {
3538b0f7 3199 vpaes_set_encrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e
MC
3200 xctx->xts.block1 = (block128_f) vpaes_encrypt;
3201 } else {
3538b0f7 3202 vpaes_set_decrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e
MC
3203 xctx->xts.block1 = (block128_f) vpaes_decrypt;
3204 }
3205
3538b0f7 3206 vpaes_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
0f113f3e
MC
3207 xctx->xts.block2 = (block128_f) vpaes_encrypt;
3208
3209 xctx->xts.key1 = &xctx->ks1;
3210 break;
3211 } else
5158c763 3212#endif
0f113f3e
MC
3213 (void)0; /* terminate potentially open 'else' */
3214
3215 if (enc) {
3538b0f7 3216 AES_set_encrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e
MC
3217 xctx->xts.block1 = (block128_f) AES_encrypt;
3218 } else {
3538b0f7 3219 AES_set_decrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e
MC
3220 xctx->xts.block1 = (block128_f) AES_decrypt;
3221 }
3222
3538b0f7 3223 AES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
0f113f3e
MC
3224 xctx->xts.block2 = (block128_f) AES_encrypt;
3225
3226 xctx->xts.key1 = &xctx->ks1;
3227 } while (0);
3538b0f7 3228 }
0f113f3e
MC
3229
3230 if (iv) {
3231 xctx->xts.key2 = &xctx->ks2;
9197c226 3232 memcpy(ctx->iv, iv, 16);
0f113f3e
MC
3233 }
3234
3235 return 1;
3236}
32a2d8dd 3237
17f121de 3238static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e
MC
3239 const unsigned char *in, size_t len)
3240{
6435f0f6 3241 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
95eda4f0
P
3242
3243 if (xctx->xts.key1 == NULL
3244 || xctx->xts.key2 == NULL
3245 || out == NULL
3246 || in == NULL
3247 || len < AES_BLOCK_SIZE)
0f113f3e 3248 return 0;
95eda4f0 3249
5516c19b 3250 /*
79c44b4e 3251 * Impose a limit of 2^20 blocks per data unit as specified by
5516c19b
P
3252 * IEEE Std 1619-2018. The earlier and obsolete IEEE Std 1619-2007
3253 * indicated that this was a SHOULD NOT rather than a MUST NOT.
3254 * NIST SP 800-38E mandates the same limit.
3255 */
3256 if (len > XTS_MAX_BLOCKS_PER_DATA_UNIT * AES_BLOCK_SIZE) {
9311d0c4 3257 ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DATA_UNIT_IS_TOO_LARGE);
5516c19b
P
3258 return 0;
3259 }
3260
0f113f3e
MC
3261 if (xctx->stream)
3262 (*xctx->stream) (in, out, len,
6435f0f6 3263 xctx->xts.key1, xctx->xts.key2,
9197c226
BK
3264 ctx->iv);
3265 else if (CRYPTO_xts128_encrypt(&xctx->xts, ctx->iv, in, out, len,
ed576acd 3266 EVP_CIPHER_CTX_is_encrypting(ctx)))
0f113f3e
MC
3267 return 0;
3268 return 1;
3269}
3270
5158c763 3271#define aes_xts_cleanup NULL
0f113f3e 3272
5158c763 3273#define XTS_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
0f113f3e
MC
3274 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3275 | EVP_CIPH_CUSTOM_COPY)
3276
3277BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
3278 BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
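/*-
 * Minimal XTS usage sketch (added for exposition, not part of the original
 * file): the EVP "key" is both halves concatenated (64 bytes for
 * EVP_aes_256_xts(), rejected if the halves are identical), the IV is the
 * 16-byte tweak (typically the data-unit/sector number), and the input must
 * be at least one AES block. The function name is hypothetical and error
 * handling is omitted.
 */
#if 0
static int sketch_aes_256_xts_encrypt(const unsigned char key[64],
                                      const unsigned char tweak[16],
                                      const unsigned char *in, int inlen,
                                      unsigned char *out)
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int len = 0, ok = 0;

    if (c == NULL)
        return 0;
    if (EVP_EncryptInit_ex(c, EVP_aes_256_xts(), NULL, key, tweak) == 1
            && EVP_EncryptUpdate(c, out, &len, in, inlen) == 1
            && EVP_EncryptFinal_ex(c, out + len, &len) == 1)
        ok = 1;
    EVP_CIPHER_CTX_free(c);
    return ok;
}
#endif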
23916810
DSH
3279
3280static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
0f113f3e 3281{
6435f0f6 3282 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c);
0f113f3e
MC
3283 switch (type) {
3284 case EVP_CTRL_INIT:
3285 cctx->key_set = 0;
3286 cctx->iv_set = 0;
3287 cctx->L = 8;
3288 cctx->M = 12;
3289 cctx->tag_set = 0;
3290 cctx->len_set = 0;
e75c5a79
DSH
3291 cctx->tls_aad_len = -1;
3292 return 1;
3293
7dddf2fc
SL
3294 case EVP_CTRL_GET_IVLEN:
3295 *(int *)ptr = 15 - cctx->L;
3296 return 1;
3297
e75c5a79
DSH
3298 case EVP_CTRL_AEAD_TLS1_AAD:
3299 /* Save the AAD for later use */
3300 if (arg != EVP_AEAD_TLS1_AAD_LEN)
3301 return 0;
6435f0f6 3302 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
e75c5a79
DSH
3303 cctx->tls_aad_len = arg;
3304 {
6435f0f6
RL
3305 uint16_t len =
3306 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
3307 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
e75c5a79 3308 /* Correct length for explicit IV */
2198b3a5
AP
3309 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
3310 return 0;
e75c5a79
DSH
3311 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
3312 /* If decrypting correct for tag too */
ed576acd 3313 if (!EVP_CIPHER_CTX_is_encrypting(c)) {
2198b3a5
AP
3314 if (len < cctx->M)
3315 return 0;
e75c5a79 3316 len -= cctx->M;
2198b3a5 3317 }
6435f0f6
RL
3318 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
3319 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
e75c5a79
DSH
3320 }
3321 /* Extra padding: tag appended to record */
3322 return cctx->M;
3323
3324 case EVP_CTRL_CCM_SET_IV_FIXED:
3325 /* Sanity check length */
3326 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
3327 return 0;
3328 /* Just copy to first part of IV */
9197c226 3329 memcpy(c->iv, ptr, arg);
0f113f3e
MC
3330 return 1;
3331
e640fa02 3332 case EVP_CTRL_AEAD_SET_IVLEN:
0f113f3e 3333 arg = 15 - arg;
018fcbec 3334 /* fall thru */
0f113f3e
MC
3335 case EVP_CTRL_CCM_SET_L:
3336 if (arg < 2 || arg > 8)
3337 return 0;
3338 cctx->L = arg;
3339 return 1;
3340
e640fa02 3341 case EVP_CTRL_AEAD_SET_TAG:
0f113f3e
MC
3342 if ((arg & 1) || arg < 4 || arg > 16)
3343 return 0;
ed576acd 3344 if (EVP_CIPHER_CTX_is_encrypting(c) && ptr)
0f113f3e
MC
3345 return 0;
3346 if (ptr) {
3347 cctx->tag_set = 1;
6435f0f6 3348 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
0f113f3e
MC
3349 }
3350 cctx->M = arg;
3351 return 1;
3352
e640fa02 3353 case EVP_CTRL_AEAD_GET_TAG:
ed576acd 3354 if (!EVP_CIPHER_CTX_is_encrypting(c) || !cctx->tag_set)
0f113f3e
MC
3355 return 0;
3356 if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
3357 return 0;
3358 cctx->tag_set = 0;
3359 cctx->iv_set = 0;
3360 cctx->len_set = 0;
3361 return 1;
3362
3363 case EVP_CTRL_COPY:
3364 {
3365 EVP_CIPHER_CTX *out = ptr;
6435f0f6 3366 EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out);
0f113f3e
MC
3367 if (cctx->ccm.key) {
3368 if (cctx->ccm.key != &cctx->ks)
3369 return 0;
3370 cctx_out->ccm.key = &cctx_out->ks;
3371 }
3372 return 1;
3373 }
3374
3375 default:
3376 return -1;
3377
3378 }
3379}
23916810
DSH
3380
3381static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
3382 const unsigned char *iv, int enc)
3383{
6435f0f6 3384 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
0f113f3e
MC
3385 if (!iv && !key)
3386 return 1;
3387 if (key)
3388 do {
5158c763 3389#ifdef HWAES_CAPABLE
0f113f3e 3390 if (HWAES_CAPABLE) {
ed576acd
TM
3391 HWAES_set_encrypt_key(key,
3392 EVP_CIPHER_CTX_get_key_length(ctx) * 8,
6435f0f6 3393 &cctx->ks.ks);
0f113f3e
MC
3394
3395 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3396 &cctx->ks, (block128_f) HWAES_encrypt);
3397 cctx->str = NULL;
3398 cctx->key_set = 1;
3399 break;
3400 } else
5158c763
MC
3401#endif
3402#ifdef VPAES_CAPABLE
0f113f3e 3403 if (VPAES_CAPABLE) {
ed576acd
TM
3404 vpaes_set_encrypt_key(key,
3405 EVP_CIPHER_CTX_get_key_length(ctx) * 8,
6435f0f6 3406 &cctx->ks.ks);
0f113f3e
MC
3407 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3408 &cctx->ks, (block128_f) vpaes_encrypt);
3409 cctx->str = NULL;
3410 cctx->key_set = 1;
3411 break;
3412 }
5158c763 3413#endif
ed576acd 3414 AES_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
6435f0f6 3415 &cctx->ks.ks);
0f113f3e
MC
3416 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3417 &cctx->ks, (block128_f) AES_encrypt);
3418 cctx->str = NULL;
3419 cctx->key_set = 1;
3420 } while (0);
3421 if (iv) {
9197c226 3422 memcpy(ctx->iv, iv, 15 - cctx->L);
0f113f3e
MC
3423 cctx->iv_set = 1;
3424 }
3425 return 1;
3426}
23916810 3427
e75c5a79
DSH
3428static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3429 const unsigned char *in, size_t len)
3430{
6435f0f6 3431 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
e75c5a79
DSH
3432 CCM128_CONTEXT *ccm = &cctx->ccm;
3433 /* Encrypt/decrypt must be performed in place */
3434 if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
3435 return -1;
3436 /* If encrypting set explicit IV from sequence number (start of AAD) */
ed576acd 3437 if (EVP_CIPHER_CTX_is_encrypting(ctx))
6435f0f6
RL
3438 memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
3439 EVP_CCM_TLS_EXPLICIT_IV_LEN);
e75c5a79 3440 /* Get rest of IV from explicit IV */
9197c226 3441 memcpy(ctx->iv + EVP_CCM_TLS_FIXED_IV_LEN, in,
6435f0f6 3442 EVP_CCM_TLS_EXPLICIT_IV_LEN);
e75c5a79
DSH
3443 /* Correct length value */
3444 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
9197c226 3445 if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L,
6435f0f6 3446 len))
e75c5a79
DSH
3447 return -1;
3448 /* Use saved AAD */
ed576acd
TM
3449 CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx),
3450 cctx->tls_aad_len);
e75c5a79
DSH
3451 /* Fix buffer to point to payload */
3452 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3453 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
ed576acd 3454 if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
e75c5a79
DSH
3455 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3456 cctx->str) :
3457 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3458 return -1;
3459 if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
3460 return -1;
3461 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3462 } else {
3463 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3464 cctx->str) :
3465 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3466 unsigned char tag[16];
3467 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3468 if (!CRYPTO_memcmp(tag, in + len, cctx->M))
3469 return len;
3470 }
3471 }
3472 OPENSSL_cleanse(out, len);
3473 return -1;
3474 }
3475}
3476
17f121de 3477static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e
MC
3478 const unsigned char *in, size_t len)
3479{
6435f0f6 3480 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
0f113f3e
MC
3481 CCM128_CONTEXT *ccm = &cctx->ccm;
3482 /* If not set up, return error */
e75c5a79
DSH
3483 if (!cctx->key_set)
3484 return -1;
3485
3486 if (cctx->tls_aad_len >= 0)
3487 return aes_ccm_tls_cipher(ctx, out, in, len);
3488
197421b1
DSH
3489 /* EVP_*Final() doesn't return any data */
3490 if (in == NULL && out != NULL)
3491 return 0;
3492
e75c5a79 3493 if (!cctx->iv_set)
0f113f3e 3494 return -1;
e75c5a79 3495
0f113f3e
MC
3496 if (!out) {
3497 if (!in) {
9197c226 3498 if (CRYPTO_ccm128_setiv(ccm, ctx->iv,
6435f0f6 3499 15 - cctx->L, len))
0f113f3e
MC
3500 return -1;
3501 cctx->len_set = 1;
3502 return len;
3503 }
 3504 /* If we have AAD we need the message length */
3505 if (!cctx->len_set && len)
3506 return -1;
3507 CRYPTO_ccm128_aad(ccm, in, len);
3508 return len;
3509 }
67c81ec3
TN
3510
3511 /* The tag must be set before actually decrypting data */
ed576acd 3512 if (!EVP_CIPHER_CTX_is_encrypting(ctx) && !cctx->tag_set)
67c81ec3
TN
3513 return -1;
3514
0f113f3e
MC
3515 /* If not set length yet do it */
3516 if (!cctx->len_set) {
9197c226 3517 if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
0f113f3e
MC
3518 return -1;
3519 cctx->len_set = 1;
3520 }
ed576acd 3521 if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
0f113f3e
MC
3522 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3523 cctx->str) :
3524 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3525 return -1;
3526 cctx->tag_set = 1;
3527 return len;
3528 } else {
3529 int rv = -1;
3530 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3531 cctx->str) :
3532 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3533 unsigned char tag[16];
3534 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
6435f0f6
RL
3535 if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
3536 cctx->M))
0f113f3e
MC
3537 rv = len;
3538 }
3539 }
3540 if (rv == -1)
3541 OPENSSL_cleanse(out, len);
3542 cctx->iv_set = 0;
3543 cctx->tag_set = 0;
3544 cctx->len_set = 0;
3545 return rv;
3546 }
0f113f3e
MC
3547}
3548
5158c763 3549#define aes_ccm_cleanup NULL
0f113f3e 3550
e75c5a79
DSH
3551BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
3552 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
f6c95e46
RS
3553BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
3554 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3555BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
3556 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
0f113f3e
MC
3557
3558typedef struct {
3559 union {
39147079 3560 OSSL_UNION_ALIGN;
0f113f3e
MC
3561 AES_KEY ks;
3562 } ks;
 3563 /* Points at the IV once set; NULL means no IV */
3564 unsigned char *iv;
3565} EVP_AES_WRAP_CTX;
97cf1f6c
DSH
3566
3567static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
3568 const unsigned char *iv, int enc)
3569{
0341ff97 3570 int len;
6435f0f6 3571 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
0341ff97
P
3572
3573 if (iv == NULL && key == NULL)
0f113f3e 3574 return 1;
0341ff97 3575 if (key != NULL) {
ed576acd
TM
3576 if (EVP_CIPHER_CTX_is_encrypting(ctx))
3577 AES_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
6435f0f6 3578 &wctx->ks.ks);
0f113f3e 3579 else
ed576acd 3580 AES_set_decrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
6435f0f6 3581 &wctx->ks.ks);
0341ff97 3582 if (iv == NULL)
0f113f3e
MC
3583 wctx->iv = NULL;
3584 }
0341ff97
P
3585 if (iv != NULL) {
3586 if ((len = EVP_CIPHER_CTX_get_iv_length(ctx)) < 0)
3587 return 0;
3588 memcpy(ctx->iv, iv, len);
9197c226 3589 wctx->iv = ctx->iv;
0f113f3e
MC
3590 }
3591 return 1;
3592}
97cf1f6c
DSH
3593
3594static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e
MC
3595 const unsigned char *in, size_t inlen)
3596{
6435f0f6 3597 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
0f113f3e
MC
3598 size_t rv;
3599 /* AES wrap with padding has IV length of 4, without padding 8 */
ed576acd 3600 int pad = EVP_CIPHER_CTX_get_iv_length(ctx) == 4;
0f113f3e
MC
3601 /* No final operation so always return zero length */
3602 if (!in)
3603 return 0;
3604 /* Input length must always be non-zero */
3605 if (!inlen)
3606 return -1;
 3607 /* If decrypting, we need at least 16 bytes and a multiple of 8 */
ed576acd 3608 if (!EVP_CIPHER_CTX_is_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
0f113f3e
MC
3609 return -1;
 3610 /* If not padding, the input must be a multiple of 8 */
3611 if (!pad && inlen & 0x7)
3612 return -1;
6d777689 3613 if (ossl_is_partially_overlapping(out, in, inlen)) {
9311d0c4 3614 ERR_raise(ERR_LIB_EVP, EVP_R_PARTIALLY_OVERLAPPING);
7141ba31
MC
3615 return 0;
3616 }
0f113f3e 3617 if (!out) {
ed576acd 3618 if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
0f113f3e
MC
 3619 /* If padding, round up to a multiple of 8 */
3620 if (pad)
3621 inlen = (inlen + 7) / 8 * 8;
3622 /* 8 byte prefix */
3623 return inlen + 8;
3624 } else {
3625 /*
 3626 * If not padding, the output will be exactly 8 bytes smaller than
 3627 * the input. If padding, it will be at least 8 bytes smaller, but
 3628 * we don't know by how much.
3629 */
3630 return inlen - 8;
3631 }
3632 }
3633 if (pad) {
ed576acd 3634 if (EVP_CIPHER_CTX_is_encrypting(ctx))
0f113f3e
MC
3635 rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
3636 out, in, inlen,
3637 (block128_f) AES_encrypt);
3638 else
3639 rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
3640 out, in, inlen,
3641 (block128_f) AES_decrypt);
3642 } else {
ed576acd 3643 if (EVP_CIPHER_CTX_is_encrypting(ctx))
0f113f3e
MC
3644 rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
3645 out, in, inlen, (block128_f) AES_encrypt);
3646 else
3647 rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
3648 out, in, inlen, (block128_f) AES_decrypt);
3649 }
3650 return rv ? (int)rv : -1;
3651}
3652
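The output-sizing branch above (when out == NULL) can be summarised as follows; this is a hypothetical helper for illustration only, not part of the EVP API.

#include <stddef.h>

/*
 * Hypothetical helper: bytes produced when wrapping inlen input bytes.
 * Without padding (RFC 3394) the input must already be a multiple of 8;
 * with padding (RFC 5649) it is first rounded up to a multiple of 8.
 * Either way an 8-byte prefix is prepended.
 */
static size_t aes_wrap_output_len(size_t inlen, int pad)
{
    if (pad)
        inlen = (inlen + 7) / 8 * 8;
    return inlen + 8;
}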
5158c763 3653#define WRAP_FLAGS (EVP_CIPH_WRAP_MODE \
0f113f3e
MC
3654 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3655 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)
97cf1f6c
DSH
3656
3657static const EVP_CIPHER aes_128_wrap = {
0f113f3e 3658 NID_id_aes128_wrap,
f6c95e46 3659 8, 16, 8, WRAP_FLAGS, EVP_ORIG_GLOBAL,
0f113f3e
MC
3660 aes_wrap_init_key, aes_wrap_cipher,
3661 NULL,
3662 sizeof(EVP_AES_WRAP_CTX),
3663 NULL, NULL, NULL, NULL
3664};
97cf1f6c
DSH
3665
3666const EVP_CIPHER *EVP_aes_128_wrap(void)
0f113f3e
MC
3667{
3668 return &aes_128_wrap;
3669}
97cf1f6c
DSH
3670
3671static const EVP_CIPHER aes_192_wrap = {
0f113f3e 3672 NID_id_aes192_wrap,
f6c95e46 3673 8, 24, 8, WRAP_FLAGS, EVP_ORIG_GLOBAL,
0f113f3e
MC
3674 aes_wrap_init_key, aes_wrap_cipher,
3675 NULL,
3676 sizeof(EVP_AES_WRAP_CTX),
3677 NULL, NULL, NULL, NULL
3678};
97cf1f6c
DSH
3679
3680const EVP_CIPHER *EVP_aes_192_wrap(void)
0f113f3e
MC
3681{
3682 return &aes_192_wrap;
3683}
97cf1f6c
DSH
3684
3685static const EVP_CIPHER aes_256_wrap = {
0f113f3e 3686 NID_id_aes256_wrap,
f6c95e46 3687 8, 32, 8, WRAP_FLAGS, EVP_ORIG_GLOBAL,
0f113f3e
MC
3688 aes_wrap_init_key, aes_wrap_cipher,
3689 NULL,
3690 sizeof(EVP_AES_WRAP_CTX),
3691 NULL, NULL, NULL, NULL
3692};
97cf1f6c
DSH
3693
3694const EVP_CIPHER *EVP_aes_256_wrap(void)
0f113f3e
MC
3695{
3696 return &aes_256_wrap;
3697}
97cf1f6c 3698
d31fed73 3699static const EVP_CIPHER aes_128_wrap_pad = {
0f113f3e 3700 NID_id_aes128_wrap_pad,
f6c95e46 3701 8, 16, 4, WRAP_FLAGS, EVP_ORIG_GLOBAL,
0f113f3e
MC
3702 aes_wrap_init_key, aes_wrap_cipher,
3703 NULL,
3704 sizeof(EVP_AES_WRAP_CTX),
3705 NULL, NULL, NULL, NULL
3706};
d31fed73
DSH
3707
3708const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
0f113f3e
MC
3709{
3710 return &aes_128_wrap_pad;
3711}
d31fed73
DSH
3712
3713static const EVP_CIPHER aes_192_wrap_pad = {
0f113f3e 3714 NID_id_aes192_wrap_pad,
f6c95e46 3715 8, 24, 4, WRAP_FLAGS, EVP_ORIG_GLOBAL,
0f113f3e
MC
3716 aes_wrap_init_key, aes_wrap_cipher,
3717 NULL,
3718 sizeof(EVP_AES_WRAP_CTX),
3719 NULL, NULL, NULL, NULL
3720};
d31fed73
DSH
3721
3722const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
0f113f3e
MC
3723{
3724 return &aes_192_wrap_pad;
3725}
d31fed73
DSH
3726
3727static const EVP_CIPHER aes_256_wrap_pad = {
0f113f3e 3728 NID_id_aes256_wrap_pad,
f6c95e46 3729 8, 32, 4, WRAP_FLAGS, EVP_ORIG_GLOBAL,
0f113f3e
MC
3730 aes_wrap_init_key, aes_wrap_cipher,
3731 NULL,
3732 sizeof(EVP_AES_WRAP_CTX),
3733 NULL, NULL, NULL, NULL
3734};
d31fed73
DSH
3735
3736const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
0f113f3e
MC
3737{
3738 return &aes_256_wrap_pad;
3739}
d31fed73 3740
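A minimal sketch of wrapping an arbitrary-length secret with the padded wrap cipher registered above. The 32-byte KEK, the helper name and the caller-provided buffer are illustrative assumptions; wrap mode has historically required EVP_CIPHER_CTX_FLAG_WRAP_ALLOW to be set on the context, so that flag is set here defensively.

#include <openssl/evp.h>

static int wrap_pad_sketch(const unsigned char kek[32],
                           const unsigned char *secret, int secret_len,
                           unsigned char *out, int *out_len)
{
    EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
    int len, ok = 0;

    if (ctx == NULL)
        return 0;
    /* Historically required for wrap-mode ciphers (assumption: still set). */
    EVP_CIPHER_CTX_set_flags(ctx, EVP_CIPHER_CTX_FLAG_WRAP_ALLOW);
    /* A NULL IV selects the default alternative IV from RFC 5649. */
    if (EVP_EncryptInit_ex(ctx, EVP_aes_256_wrap_pad(), NULL, kek, NULL) != 1
            || EVP_EncryptUpdate(ctx, out, &len, secret, secret_len) != 1)
        goto err;
    *out_len = len;                 /* 8 + secret_len rounded up to 8 */
    if (EVP_EncryptFinal_ex(ctx, out + len, &len) != 1)
        goto err;
    ok = 1;
 err:
    EVP_CIPHER_CTX_free(ctx);
    return ok;
}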
5158c763 3741#ifndef OPENSSL_NO_OCB
e6b336ef 3742static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
0f113f3e 3743{
6435f0f6 3744 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
0f113f3e
MC
3745 EVP_CIPHER_CTX *newc;
3746 EVP_AES_OCB_CTX *new_octx;
3747
3748 switch (type) {
3749 case EVP_CTRL_INIT:
3750 octx->key_set = 0;
3751 octx->iv_set = 0;
ed576acd 3752 octx->ivlen = EVP_CIPHER_get_iv_length(c->cipher);
9197c226 3753 octx->iv = c->iv;
0f113f3e
MC
3754 octx->taglen = 16;
3755 octx->data_buf_len = 0;
3756 octx->aad_buf_len = 0;
3757 return 1;
3758
7dddf2fc
SL
3759 case EVP_CTRL_GET_IVLEN:
3760 *(int *)ptr = octx->ivlen;
3761 return 1;
3762
e640fa02 3763 case EVP_CTRL_AEAD_SET_IVLEN:
0f113f3e
MC
3764 /* IV len must be 1 to 15 */
3765 if (arg <= 0 || arg > 15)
3766 return 0;
3767
3768 octx->ivlen = arg;
3769 return 1;
3770
e640fa02 3771 case EVP_CTRL_AEAD_SET_TAG:
12a765a5 3772 if (ptr == NULL) {
d57d135c
MC
3773 /* Tag len must be 0 to 16 */
3774 if (arg < 0 || arg > 16)
3775 return 0;
3776
3777 octx->taglen = arg;
3778 return 1;
3779 }
ed576acd 3780 if (arg != octx->taglen || EVP_CIPHER_CTX_is_encrypting(c))
0f113f3e
MC
3781 return 0;
3782 memcpy(octx->tag, ptr, arg);
3783 return 1;
3784
e640fa02 3785 case EVP_CTRL_AEAD_GET_TAG:
ed576acd 3786 if (arg != octx->taglen || !EVP_CIPHER_CTX_is_encrypting(c))
0f113f3e
MC
3787 return 0;
3788
3789 memcpy(ptr, octx->tag, arg);
3790 return 1;
3791
3792 case EVP_CTRL_COPY:
3793 newc = (EVP_CIPHER_CTX *)ptr;
6435f0f6 3794 new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc);
0f113f3e 3795 return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
bdc985b1
AP
3796 &new_octx->ksenc.ks,
3797 &new_octx->ksdec.ks);
0f113f3e
MC
3798
3799 default:
3800 return -1;
3801
3802 }
3803}
e6b336ef
MC
3804
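The control handler above treats EVP_CTRL_AEAD_SET_TAG differently depending on whether a tag buffer is supplied: a NULL pointer only changes the expected tag length (encrypt side), while a non-NULL pointer provides the received tag for decryption and must match the length already configured (16 by default). A minimal decrypt-side sketch with a hypothetical helper name:

#include <openssl/evp.h>

/*
 * Hypothetical helper: hand a received OCB tag to the context before
 * decrypting. taglen must equal the tag length already configured on
 * the context (16 unless changed via a NULL-pointer SET_TAG call).
 */
static int ocb_set_expected_tag(EVP_CIPHER_CTX *ctx,
                                const unsigned char *tag, int taglen)
{
    return EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, taglen,
                               (void *)tag);
}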
3805static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
3806 const unsigned char *iv, int enc)
3807{
6435f0f6 3808 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
0f113f3e
MC
3809 if (!iv && !key)
3810 return 1;
3811 if (key) {
3812 do {
3813 /*
3814 * We set both the encrypt and decrypt key here because decrypt
 3815 * needs both. We could possibly optimise by not setting the
 3816 * decrypt key schedule for an encrypt-only operation.
3817 */
5158c763 3818# ifdef HWAES_CAPABLE
02dc0b82 3819 if (HWAES_CAPABLE) {
ed576acd 3820 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
6435f0f6 3821 &octx->ksenc.ks);
ed576acd 3822 HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
6435f0f6 3823 &octx->ksdec.ks);
02dc0b82
AP
3824 if (!CRYPTO_ocb128_init(&octx->ocb,
3825 &octx->ksenc.ks, &octx->ksdec.ks,
3826 (block128_f) HWAES_encrypt,
3827 (block128_f) HWAES_decrypt,
3828 enc ? HWAES_ocb_encrypt
3829 : HWAES_ocb_decrypt))
3830 return 0;
3831 break;
3832 }
5158c763
MC
3833# endif
3834# ifdef VPAES_CAPABLE
0f113f3e 3835 if (VPAES_CAPABLE) {
ed576acd
TM
3836 vpaes_set_encrypt_key(key,
3837 EVP_CIPHER_CTX_get_key_length(ctx) * 8,
6435f0f6 3838 &octx->ksenc.ks);
ed576acd
TM
3839 vpaes_set_decrypt_key(key,
3840 EVP_CIPHER_CTX_get_key_length(ctx) * 8,
6435f0f6 3841 &octx->ksdec.ks);
bdc985b1
AP
3842 if (!CRYPTO_ocb128_init(&octx->ocb,
3843 &octx->ksenc.ks, &octx->ksdec.ks,
3844 (block128_f) vpaes_encrypt,
bd30091c
AP
3845 (block128_f) vpaes_decrypt,
3846 NULL))
0f113f3e
MC
3847 return 0;
3848 break;
3849 }
5158c763 3850# endif
ed576acd 3851 AES_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
6435f0f6 3852 &octx->ksenc.ks);
ed576acd 3853 AES_set_decrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
6435f0f6 3854 &octx->ksdec.ks);
bdc985b1
AP
3855 if (!CRYPTO_ocb128_init(&octx->ocb,
3856 &octx->ksenc.ks, &octx->ksdec.ks,
0f113f3e 3857 (block128_f) AES_encrypt,
bd30091c
AP
3858 (block128_f) AES_decrypt,
3859 NULL))
0f113f3e
MC
3860 return 0;
3861 }
3862 while (0);
3863
3864 /*
 3865 * If we have an IV, we can set it directly; otherwise use the saved IV.
3866 */
3867 if (iv == NULL && octx->iv_set)
3868 iv = octx->iv;
3869 if (iv) {
3870 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
3871 != 1)
3872 return 0;
3873 octx->iv_set = 1;
3874 }
3875 octx->key_set = 1;
3876 } else {
 3877 /* If the key is set, apply the IV now; otherwise save a copy */
3878 if (octx->key_set)
3879 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
3880 else
3881 memcpy(octx->iv, iv, octx->ivlen);
3882 octx->iv_set = 1;
3883 }
3884 return 1;
3885}
e6b336ef
MC
3886
3887static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e
MC
3888 const unsigned char *in, size_t len)
3889{
3890 unsigned char *buf;
3891 int *buf_len;
3892 int written_len = 0;
3893 size_t trailing_len;
6435f0f6 3894 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
0f113f3e
MC
3895
 3896 /* If the IV or key is not set, return an error */
3897 if (!octx->iv_set)
3898 return -1;
3899
3900 if (!octx->key_set)
3901 return -1;
3902
0ba5a9ea 3903 if (in != NULL) {
0f113f3e
MC
3904 /*
3905 * Need to ensure we are only passing full blocks to low level OCB
3906 * routines. We do it here rather than in EVP_EncryptUpdate/
3907 * EVP_DecryptUpdate because we need to pass full blocks of AAD too
3908 * and those routines don't support that
3909 */
3910
3911 /* Are we dealing with AAD or normal data here? */
3912 if (out == NULL) {
3913 buf = octx->aad_buf;
3914 buf_len = &(octx->aad_buf_len);
3915 } else {
3916 buf = octx->data_buf;
3917 buf_len = &(octx->data_buf_len);
7141ba31 3918
6d777689 3919 if (ossl_is_partially_overlapping(out + *buf_len, in, len)) {
9311d0c4 3920 ERR_raise(ERR_LIB_EVP, EVP_R_PARTIALLY_OVERLAPPING);
7141ba31
MC
3921 return 0;
3922 }
0f113f3e
MC
3923 }
3924
3925 /*
3926 * If we've got a partially filled buffer from a previous call then
3927 * use that data first
3928 */
0ba5a9ea 3929 if (*buf_len > 0) {
0f113f3e
MC
3930 unsigned int remaining;
3931
0ba5a9ea 3932 remaining = AES_BLOCK_SIZE - (*buf_len);
0f113f3e
MC
3933 if (remaining > len) {
3934 memcpy(buf + (*buf_len), in, len);
3935 *(buf_len) += len;
3936 return 0;
3937 }
3938 memcpy(buf + (*buf_len), in, remaining);
3939
3940 /*
3941 * If we get here we've filled the buffer, so process it
3942 */
3943 len -= remaining;
3944 in += remaining;
3945 if (out == NULL) {
0ba5a9ea 3946 if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
0f113f3e 3947 return -1;
ed576acd 3948 } else if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
0ba5a9ea
MC
3949 if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
3950 AES_BLOCK_SIZE))
0f113f3e
MC
3951 return -1;
3952 } else {
0ba5a9ea
MC
3953 if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
3954 AES_BLOCK_SIZE))
0f113f3e
MC
3955 return -1;
3956 }
0ba5a9ea 3957 written_len = AES_BLOCK_SIZE;
0f113f3e 3958 *buf_len = 0;
7c12c7b6
MC
3959 if (out != NULL)
3960 out += AES_BLOCK_SIZE;
0f113f3e
MC
3961 }
3962
3963 /* Do we have a partial block to handle at the end? */
0ba5a9ea 3964 trailing_len = len % AES_BLOCK_SIZE;
0f113f3e
MC
3965
3966 /*
3967 * If we've got some full blocks to handle, then process these first
3968 */
3969 if (len != trailing_len) {
3970 if (out == NULL) {
3971 if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
3972 return -1;
ed576acd 3973 } else if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
0f113f3e
MC
3974 if (!CRYPTO_ocb128_encrypt
3975 (&octx->ocb, in, out, len - trailing_len))
3976 return -1;
3977 } else {
3978 if (!CRYPTO_ocb128_decrypt
3979 (&octx->ocb, in, out, len - trailing_len))
3980 return -1;
3981 }
3982 written_len += len - trailing_len;
3983 in += len - trailing_len;
3984 }
3985
3986 /* Handle any trailing partial block */
0ba5a9ea 3987 if (trailing_len > 0) {
0f113f3e
MC
3988 memcpy(buf, in, trailing_len);
3989 *buf_len = trailing_len;
3990 }
3991
3992 return written_len;
3993 } else {
3994 /*
3995 * First of all empty the buffer of any partial block that we might
3996 * have been provided - both for data and AAD
3997 */
0ba5a9ea 3998 if (octx->data_buf_len > 0) {
ed576acd 3999 if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
0f113f3e
MC
4000 if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
4001 octx->data_buf_len))
4002 return -1;
4003 } else {
4004 if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
4005 octx->data_buf_len))
4006 return -1;
4007 }
4008 written_len = octx->data_buf_len;
4009 octx->data_buf_len = 0;
4010 }
0ba5a9ea 4011 if (octx->aad_buf_len > 0) {
0f113f3e
MC
4012 if (!CRYPTO_ocb128_aad
4013 (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
4014 return -1;
4015 octx->aad_buf_len = 0;
4016 }
4017 /* If decrypting then verify */
ed576acd 4018 if (!EVP_CIPHER_CTX_is_encrypting(ctx)) {
0f113f3e
MC
4019 if (octx->taglen < 0)
4020 return -1;
4021 if (CRYPTO_ocb128_finish(&octx->ocb,
4022 octx->tag, octx->taglen) != 0)
4023 return -1;
4024 octx->iv_set = 0;
4025 return written_len;
4026 }
4027 /* If encrypting then just get the tag */
4028 if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
4029 return -1;
4030 /* Don't reuse the IV */
4031 octx->iv_set = 0;
4032 return written_len;
4033 }
4034}
e6b336ef
MC
4035
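A minimal sketch of AES-128-OCB encryption through the EVP interface implemented above; the 12-byte nonce, 16-byte tag and helper name are illustrative assumptions, with error handling collapsed into one failure path.

#include <openssl/evp.h>

static int ocb_encrypt_sketch(const unsigned char key[16],
                              const unsigned char nonce[12],
                              const unsigned char *aad, int aadlen,
                              const unsigned char *pt, int ptlen,
                              unsigned char *ct, unsigned char tag[16])
{
    EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
    int len, outlen = 0, ok = 0;

    if (ctx == NULL)
        return 0;
    if (EVP_EncryptInit_ex(ctx, EVP_aes_128_ocb(), NULL, NULL, NULL) != 1
            || EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL) != 1
            || EVP_EncryptInit_ex(ctx, NULL, NULL, key, nonce) != 1
            /* AAD is supplied with a NULL output pointer. */
            || (aadlen > 0
                && EVP_EncryptUpdate(ctx, NULL, &len, aad, aadlen) != 1)
            || EVP_EncryptUpdate(ctx, ct, &outlen, pt, ptlen) != 1
            /* Final flushes any buffered partial block; total is ptlen. */
            || EVP_EncryptFinal_ex(ctx, ct + outlen, &len) != 1
            || EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG, 16, tag) != 1)
        goto err;
    ok = 1;
 err:
    EVP_CIPHER_CTX_free(ctx);
    return ok;
}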
4036static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
0f113f3e 4037{
6435f0f6 4038 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
0f113f3e
MC
4039 CRYPTO_ocb128_cleanup(&octx->ocb);
4040 return 1;
4041}
e6b336ef 4042
c4aede20
MC
4043BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
4044 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4045BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
4046 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4047BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
4048 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
5158c763 4049#endif /* OPENSSL_NO_OCB */