/*
 * Copyright 2001-2021 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the Apache License 2.0 (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

/*
 * This file uses the low level AES functions (which are deprecated for
 * non-internal use) in order to implement the EVP AES ciphers.
 */
#include "internal/deprecated.h"

#include <string.h>
#include <assert.h>
#include <openssl/opensslconf.h>
#include <openssl/crypto.h>
#include <openssl/evp.h>
#include <openssl/err.h>
#include <openssl/aes.h>
#include <openssl/rand.h>
#include <openssl/cmac.h>
#include "crypto/evp.h"
#include "internal/cryptlib.h"
#include "crypto/modes.h"
#include "crypto/siv.h"
#include "crypto/aes_platform.h"
#include "evp_local.h"

typedef struct {
    union {
        OSSL_UNION_ALIGN;
        AES_KEY ks;
    } ks;
    block128_f block;
    union {
        cbc128_f cbc;
        ctr128_f ctr;
    } stream;
} EVP_AES_KEY;

typedef struct {
    union {
        OSSL_UNION_ALIGN;
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    GCM128_CONTEXT gcm;
    unsigned char *iv;          /* Temporary IV store */
    int ivlen;                  /* IV length */
    int taglen;
    int iv_gen;                 /* It is OK to generate IVs */
    int iv_gen_rand;            /* No IV was specified, so generate a rand IV */
    int tls_aad_len;            /* TLS AAD length */
    uint64_t tls_enc_records;   /* Number of TLS records encrypted */
    ctr128_f ctr;
} EVP_AES_GCM_CTX;

typedef struct {
    union {
        OSSL_UNION_ALIGN;
        AES_KEY ks;
    } ks1, ks2;                 /* AES key schedules to use */
    XTS128_CONTEXT xts;
    void (*stream) (const unsigned char *in,
                    unsigned char *out, size_t length,
                    const AES_KEY *key1, const AES_KEY *key2,
                    const unsigned char iv[16]);
} EVP_AES_XTS_CTX;

#ifdef FIPS_MODULE
static const int allow_insecure_decrypt = 0;
#else
static const int allow_insecure_decrypt = 1;
#endif
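/*
 * allow_insecure_decrypt is consulted by the XTS init_key functions below:
 * in the FIPS module, XTS keys whose two halves are identical are rejected
 * for both encryption and decryption, while outside the FIPS module the
 * check is only enforced when encrypting.
 */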

typedef struct {
    union {
        OSSL_UNION_ALIGN;
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    int tag_set;                /* Set if tag is valid */
    int len_set;                /* Set if message length set */
    int L, M;                   /* L and M parameters from RFC3610 */
    int tls_aad_len;            /* TLS AAD length */
    CCM128_CONTEXT ccm;
    ccm128_f str;
} EVP_AES_CCM_CTX;

#ifndef OPENSSL_NO_OCB
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        AES_KEY ks;
    } ksenc;                    /* AES key schedule to use for encryption */
    union {
        OSSL_UNION_ALIGN;
        AES_KEY ks;
    } ksdec;                    /* AES key schedule to use for decryption */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    OCB128_CONTEXT ocb;
    unsigned char *iv;          /* Temporary IV store */
    unsigned char tag[16];
    unsigned char data_buf[16]; /* Store partial data blocks */
    unsigned char aad_buf[16];  /* Store partial AAD blocks */
    int data_buf_len;
    int aad_buf_len;
    int ivlen;                  /* IV length */
    int taglen;
} EVP_AES_OCB_CTX;
#endif

#define MAXBITCHUNK     ((size_t)1<<(sizeof(size_t)*8-4))
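/*
 * MAXBITCHUNK caps how much data is handed to the low-level routines in one
 * call; in particular the bit-oriented CFB1 path elsewhere in this file
 * multiplies a byte count by 8, and this bound keeps that product from
 * overflowing a size_t.
 */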

/* increment counter (64-bit int) by 1 */
static void ctr64_inc(unsigned char *counter)
{
    int n = 8;
    unsigned char c;

    do {
        --n;
        c = counter[n];
        ++c;
        counter[n] = c;
        if (c)
            return;
    } while (n);
}

#if defined(AESNI_CAPABLE)
# if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
#  define AES_GCM_ASM2(gctx)      (gctx->gcm.block==(block128_f)aesni_encrypt && \
                                 gctx->gcm.ghash==gcm_ghash_avx)
#  undef AES_GCM_ASM2          /* minor size optimization */
# endif

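/*
 * aesni_init_key() expands the user key with the AES-NI key schedule and
 * selects the block/stream function pointers: the decrypt schedule is only
 * needed for ECB and CBC decryption, since CFB, OFB and CTR always run the
 * cipher in its encrypt direction.
 */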
static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                          const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        ret = aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) aesni_decrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) aesni_cbc_encrypt : NULL;
    } else {
        ret = aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) aesni_encrypt;
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
        else if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        else
            dat->stream.cbc = NULL;
    }

    if (ret < 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}

static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
                      ctx->iv, EVP_CIPHER_CTX_encrypting(ctx));

    return 1;
}

static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    size_t bl = EVP_CIPHER_CTX_block_size(ctx);

    if (len < bl)
        return 1;

    aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
                      EVP_CIPHER_CTX_encrypting(ctx));

    return 1;
}

# define aesni_ofb_cipher aes_ofb_cipher
static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb_cipher aes_cfb_cipher
static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb8_cipher aes_cfb8_cipher
static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_cfb1_cipher aes_cfb1_cipher
static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_ctr_cipher aes_ctr_cipher
static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

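/*
 * GCM allows the key and the IV to be supplied in separate init calls.  If
 * only an IV arrives and the key schedule is already in place it is applied
 * to the GCM context immediately; otherwise it is parked in gctx->iv until
 * the key shows up.
 */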
static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                              &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
        gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

# define aesni_gcm_cipher aes_gcm_cipher
static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);

    if (!iv && !key)
        return 1;

    if (key) {
        /* The key is two half length keys in reality */
        const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
        const int bits = bytes * 8;

        /*
         * Verify that the two keys are different.
         *
         * This addresses Rogaway's vulnerability.
         * See comment in aes_xts_init_key() below.
         */
        if ((!allow_insecure_decrypt || enc)
                && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
            ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DUPLICATED_KEYS);
            return 0;
        }

        /* key_len is two AES keys */
        if (enc) {
            aesni_set_encrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_encrypt;
            xctx->stream = aesni_xts_encrypt;
        } else {
            aesni_set_decrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_decrypt;
            xctx->stream = aesni_xts_decrypt;
        }

        aesni_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aesni_encrypt;

        xctx->xts.key1 = &xctx->ks1;
    }

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(ctx->iv, iv, 16);
    }

    return 1;
}

# define aesni_xts_cipher aes_xts_cipher
static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                              &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aesni_encrypt);
        cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
            (ccm128_f) aesni_ccm64_decrypt_blocks;
        cctx->key_set = 1;
    }
    if (iv) {
        memcpy(ctx->iv, iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}

# define aesni_ccm_cipher aes_ccm_cipher
static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# ifndef OPENSSL_NO_OCB
static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
            aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &octx->ksenc.ks);
            aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) aesni_encrypt,
                                    (block128_f) aesni_decrypt,
                                    enc ? aesni_ocb_encrypt
                                        : aesni_ocb_decrypt))
                return 0;
        }
        while (0);

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}

# define aesni_ocb_cipher aes_ocb_cipher
static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
# endif                        /* OPENSSL_NO_OCB */

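/*
 * The two macros below stamp out, for each key length and mode, a static
 * EVP_CIPHER table for the AES-NI code path and one for the generic C path,
 * plus an EVP_aes_<keylen>_<mode>() accessor that picks between them at run
 * time via AESNI_CAPABLE.
 */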
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        EVP_ORIG_GLOBAL, \
        aesni_init_key, \
        aesni_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize, \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        EVP_ORIG_GLOBAL, \
        aes_init_key, \
        aes_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        EVP_ORIG_GLOBAL, \
        aesni_##mode##_init_key, \
        aesni_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        EVP_ORIG_GLOBAL, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }

#elif defined(SPARC_AES_CAPABLE)

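/*
 * SPARC T4 code path: the aes_t4_* routines drive the hardware AES opcodes,
 * with separate CBC/CTR/XTS stream helpers per key size, so key setup has to
 * dispatch on the key length in bits.
 */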
static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                           const unsigned char *iv, int enc)
{
    int ret, mode, bits;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        ret = 0;
        aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_decrypt;
        switch (bits) {
        case 128:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes128_t4_cbc_decrypt : NULL;
            break;
        case 192:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes192_t4_cbc_decrypt : NULL;
            break;
        case 256:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes256_t4_cbc_decrypt : NULL;
            break;
        default:
            ret = -1;
        }
    } else {
        ret = 0;
        aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_encrypt;
        switch (bits) {
        case 128:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        case 192:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        case 256:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        default:
            ret = -1;
        }
    }

    if (ret < 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}

# define aes_t4_cbc_cipher aes_cbc_cipher
static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_ecb_cipher aes_ecb_cipher
static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_ofb_cipher aes_ofb_cipher
static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_cfb_cipher aes_cfb_cipher
static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_cfb8_cipher aes_cfb8_cipher
static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

# define aes_t4_cfb1_cipher aes_cfb1_cipher
static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

# define aes_t4_ctr_cipher aes_ctr_cipher
static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
        aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                           (block128_f) aes_t4_encrypt);
        switch (bits) {
        case 128:
            gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            break;
        case 192:
            gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            break;
        case 256:
            gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
            break;
        default:
            return 0;
        }
        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

# define aes_t4_gcm_cipher aes_gcm_cipher
static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);

    if (!iv && !key)
        return 1;

    if (key) {
        /* The key is two half length keys in reality */
        const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
        const int bits = bytes * 8;

        /*
         * Verify that the two keys are different.
         *
         * This addresses Rogaway's vulnerability.
         * See comment in aes_xts_init_key() below.
         */
        if ((!allow_insecure_decrypt || enc)
                && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
            ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DUPLICATED_KEYS);
            return 0;
        }

        xctx->stream = NULL;
        /* key_len is two AES keys */
        if (enc) {
            aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_encrypt;
            switch (bits) {
            case 128:
                xctx->stream = aes128_t4_xts_encrypt;
                break;
            case 256:
                xctx->stream = aes256_t4_xts_encrypt;
                break;
            default:
                return 0;
            }
        } else {
            aes_t4_set_decrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_decrypt;
            switch (bits) {
            case 128:
                xctx->stream = aes128_t4_xts_decrypt;
                break;
            case 256:
                xctx->stream = aes256_t4_xts_decrypt;
                break;
            default:
                return 0;
            }
        }

        aes_t4_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aes_t4_encrypt;

        xctx->xts.key1 = &xctx->ks1;
    }

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(ctx->iv, iv, 16);
    }

    return 1;
}

# define aes_t4_xts_cipher aes_xts_cipher
static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
        aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aes_t4_encrypt);
        cctx->str = NULL;
        cctx->key_set = 1;
    }
    if (iv) {
        memcpy(ctx->iv, iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}

# define aes_t4_ccm_cipher aes_ccm_cipher
static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# ifndef OPENSSL_NO_OCB
static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
            aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                   &octx->ksenc.ks);
            aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                   &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) aes_t4_encrypt,
                                    (block128_f) aes_t4_decrypt,
                                    NULL))
                return 0;
        }
        while (0);

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}

# define aes_t4_ocb_cipher aes_ocb_cipher
static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# endif                        /* OPENSSL_NO_OCB */

# ifndef OPENSSL_NO_SIV
#  define aes_t4_siv_init_key aes_siv_init_key
#  define aes_t4_siv_cipher aes_siv_cipher
# endif                        /* OPENSSL_NO_SIV */

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        EVP_ORIG_GLOBAL, \
        aes_t4_init_key, \
        aes_t4_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize, \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        EVP_ORIG_GLOBAL, \
        aes_init_key, \
        aes_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        EVP_ORIG_GLOBAL, \
        aes_t4_##mode##_init_key, \
        aes_t4_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        EVP_ORIG_GLOBAL, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }

#elif defined(S390X_aes_128_CAPABLE)
/* IBM S390X support */
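/*
 * The s390x code path programs the CPACF instructions directly: KM (ECB),
 * KMO (OFB), KMF (CFB), KMAC (CCM authentication) and KMA (GCM).  Each
 * context below is laid out around the corresponding instruction's
 * parameter block.
 */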
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        /*-
         * KM-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-06)
         */
        struct {
            unsigned char k[32];
        } param;
        /* KM-AES parameter block - end */
    } km;
    unsigned int fc;
} S390X_AES_ECB_CTX;

typedef struct {
    union {
        OSSL_UNION_ALIGN;
        /*-
         * KMO-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
         */
        struct {
            unsigned char cv[16];
            unsigned char k[32];
        } param;
        /* KMO-AES parameter block - end */
    } kmo;
    unsigned int fc;

    int res;
} S390X_AES_OFB_CTX;

typedef struct {
    union {
        OSSL_UNION_ALIGN;
        /*-
         * KMF-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
         */
        struct {
            unsigned char cv[16];
            unsigned char k[32];
        } param;
        /* KMF-AES parameter block - end */
    } kmf;
    unsigned int fc;

    int res;
} S390X_AES_CFB_CTX;

typedef struct {
    union {
        OSSL_UNION_ALIGN;
        /*-
         * KMA-GCM-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-11)
         */
        struct {
            unsigned char reserved[12];
            union {
                unsigned int w;
                unsigned char b[4];
            } cv;
            union {
                unsigned long long g[2];
                unsigned char b[16];
            } t;
            unsigned char h[16];
            unsigned long long taadl;
            unsigned long long tpcl;
            union {
                unsigned long long g[2];
                unsigned int w[4];
            } j0;
            unsigned char k[32];
        } param;
        /* KMA-GCM-AES parameter block - end */
    } kma;
    unsigned int fc;
    int key_set;

    unsigned char *iv;
    int ivlen;
    int iv_set;
    int iv_gen;

    int taglen;

    unsigned char ares[16];
    unsigned char mres[16];
    unsigned char kres[16];
    int areslen;
    int mreslen;
    int kreslen;

    int tls_aad_len;
    uint64_t tls_enc_records;   /* Number of TLS records encrypted */
} S390X_AES_GCM_CTX;

typedef struct {
    union {
        OSSL_UNION_ALIGN;
        /*-
         * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
         * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
         * rounds field is used to store the function code and that the key
         * schedule is not stored (if aes hardware support is detected).
         */
        struct {
            unsigned char pad[16];
            AES_KEY k;
        } key;

        struct {
            /*-
             * KMAC-AES parameter block - begin
             * (see z/Architecture Principles of Operation >= SA22-7832-08)
             */
            struct {
                union {
                    unsigned long long g[2];
                    unsigned char b[16];
                } icv;
                unsigned char k[32];
            } kmac_param;
            /* KMAC-AES parameter block - end */

            union {
                unsigned long long g[2];
                unsigned char b[16];
            } nonce;
            union {
                unsigned long long g[2];
                unsigned char b[16];
            } buf;

            unsigned long long blocks;
            int l;
            int m;
            int tls_aad_len;
            int iv_set;
            int tag_set;
            int len_set;
            int key_set;

            unsigned char pad[140];
            unsigned int fc;
        } ccm;
    } aes;
} S390X_AES_CCM_CTX;

# define s390x_aes_init_key aes_init_key
static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc);

# define S390X_AES_CBC_CTX EVP_AES_KEY

# define s390x_aes_cbc_init_key aes_init_key

# define s390x_aes_cbc_cipher aes_cbc_cipher
static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);

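/*
 * ECB runs straight on the KM instruction: the parameter block carries only
 * the raw key, while the function code encodes the key length and, for
 * decryption, the S390X_DECRYPT modifier.
 */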
static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
{
    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);

    cctx->fc = S390X_AES_FC(keylen);
    if (!enc)
        cctx->fc |= S390X_DECRYPT;

    memcpy(cctx->km.param.k, key, keylen);
    return 1;
}

static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);

    s390x_km(in, len, out, cctx->fc, &cctx->km.param);
    return 1;
}

static int s390x_aes_ofb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *ivec, int enc)
{
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
    const unsigned char *iv = ctx->oiv;
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);

    memcpy(cctx->kmo.param.cv, iv, ivlen);
    memcpy(cctx->kmo.param.k, key, keylen);
    cctx->fc = S390X_AES_FC(keylen);
    cctx->res = 0;
    return 1;
}

static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
    int n = cctx->res;
    int rem;

    while (n && len) {
        *out = *in ^ cctx->kmo.param.cv[n];
        n = (n + 1) & 0xf;
        --len;
        ++in;
        ++out;
    }

    rem = len & 0xf;

    len &= ~(size_t)0xf;
    if (len) {
        s390x_kmo(in, len, out, cctx->fc, &cctx->kmo.param);

        out += len;
        in += len;
    }

    if (rem) {
        s390x_km(cctx->kmo.param.cv, 16, cctx->kmo.param.cv, cctx->fc,
                 cctx->kmo.param.k);

        while (rem--) {
            out[n] = in[n] ^ cctx->kmo.param.cv[n];
            ++n;
        }
    }

    cctx->res = n;
    return 1;
}

static int s390x_aes_cfb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *ivec, int enc)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const unsigned char *iv = ctx->oiv;
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);

    cctx->fc = S390X_AES_FC(keylen);
    cctx->fc |= 16 << 24;       /* 16 bytes cipher feedback */
    if (!enc)
        cctx->fc |= S390X_DECRYPT;

    cctx->res = 0;
    memcpy(cctx->kmf.param.cv, iv, ivlen);
    memcpy(cctx->kmf.param.k, key, keylen);
    return 1;
}

static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);
    int n = cctx->res;
    int rem;
    unsigned char tmp;

    while (n && len) {
        tmp = *in;
        *out = cctx->kmf.param.cv[n] ^ tmp;
        cctx->kmf.param.cv[n] = enc ? *out : tmp;
        n = (n + 1) & 0xf;
        --len;
        ++in;
        ++out;
    }

    rem = len & 0xf;

    len &= ~(size_t)0xf;
    if (len) {
        s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);

        out += len;
        in += len;
    }

    if (rem) {
        s390x_km(cctx->kmf.param.cv, 16, cctx->kmf.param.cv,
                 S390X_AES_FC(keylen), cctx->kmf.param.k);

        while (rem--) {
            tmp = in[n];
            out[n] = cctx->kmf.param.cv[n] ^ tmp;
            cctx->kmf.param.cv[n] = enc ? out[n] : tmp;
            ++n;
        }
    }

    cctx->res = n;
    return 1;
}

static int s390x_aes_cfb8_init_key(EVP_CIPHER_CTX *ctx,
                                   const unsigned char *key,
                                   const unsigned char *ivec, int enc)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const unsigned char *iv = ctx->oiv;
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);

    cctx->fc = S390X_AES_FC(keylen);
    cctx->fc |= 1 << 24;        /* 1 byte cipher feedback */
    if (!enc)
        cctx->fc |= S390X_DECRYPT;

    memcpy(cctx->kmf.param.cv, iv, ivlen);
    memcpy(cctx->kmf.param.k, key, keylen);
    return 1;
}

static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);

    s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
    return 1;
}

# define s390x_aes_cfb1_init_key aes_init_key

# define s390x_aes_cfb1_cipher aes_cfb1_cipher
static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len);

# define S390X_AES_CTR_CTX EVP_AES_KEY

# define s390x_aes_ctr_init_key aes_init_key

# define s390x_aes_ctr_cipher aes_ctr_cipher
static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);

/* iv + padding length for iv lengths != 12 */
# define S390X_gcm_ivpadlen(i)  ((((i) + 15) >> 4 << 4) + 16)
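/*
 * The padded IV buffer holds the IV rounded up to a whole 16-byte block plus
 * one extra block carrying the IV bit length, as required by the GHASH
 * computation for non-96-bit IVs; e.g. a 16-byte IV needs
 * S390X_gcm_ivpadlen(16) == 32 bytes.
 */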

/*-
 * Process additional authenticated data. Returns 0 on success. Code is
 * big-endian.
 */
static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
                             size_t len)
{
    unsigned long long alen;
    int n, rem;

    if (ctx->kma.param.tpcl)
        return -2;

    alen = ctx->kma.param.taadl + len;
    if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
        return -1;
    ctx->kma.param.taadl = alen;

    n = ctx->areslen;
    if (n) {
        while (n && len) {
            ctx->ares[n] = *aad;
            n = (n + 1) & 0xf;
            ++aad;
            --len;
        }
        /* ctx->ares contains a complete block if offset has wrapped around */
        if (!n) {
            s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
            ctx->fc |= S390X_KMA_HS;
        }
        ctx->areslen = n;
    }

    rem = len & 0xf;

    len &= ~(size_t)0xf;
    if (len) {
        s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
        aad += len;
        ctx->fc |= S390X_KMA_HS;
    }

    if (rem) {
        ctx->areslen = rem;

        do {
            --rem;
            ctx->ares[rem] = aad[rem];
        } while (rem);
    }
    return 0;
}

/*-
 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
 * success. Code is big-endian.
 */
static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
                         unsigned char *out, size_t len)
{
    const unsigned char *inptr;
    unsigned long long mlen;
    union {
        unsigned int w[4];
        unsigned char b[16];
    } buf;
    size_t inlen;
    int n, rem, i;

    mlen = ctx->kma.param.tpcl + len;
    if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
        return -1;
    ctx->kma.param.tpcl = mlen;

    n = ctx->mreslen;
    if (n) {
        inptr = in;
        inlen = len;
        while (n && inlen) {
            ctx->mres[n] = *inptr;
            n = (n + 1) & 0xf;
            ++inptr;
            --inlen;
        }
        /* ctx->mres contains a complete block if offset has wrapped around */
        if (!n) {
            s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
                      ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
            ctx->fc |= S390X_KMA_HS;
            ctx->areslen = 0;

            /* previous call already encrypted/decrypted its remainder,
             * see comment below */
            n = ctx->mreslen;
            while (n) {
                *out = buf.b[n];
                n = (n + 1) & 0xf;
                ++out;
                ++in;
                --len;
            }
            ctx->mreslen = 0;
        }
    }

    rem = len & 0xf;

    len &= ~(size_t)0xf;
    if (len) {
        s390x_kma(ctx->ares, ctx->areslen, in, len, out,
                  ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
        in += len;
        out += len;
        ctx->fc |= S390X_KMA_HS;
        ctx->areslen = 0;
    }

    /*-
     * If there is a remainder, it has to be saved such that it can be
     * processed by kma later. However, we also have to do the for-now
     * unauthenticated encryption/decryption part here and now...
     */
    if (rem) {
        if (!ctx->mreslen) {
            buf.w[0] = ctx->kma.param.j0.w[0];
            buf.w[1] = ctx->kma.param.j0.w[1];
            buf.w[2] = ctx->kma.param.j0.w[2];
            buf.w[3] = ctx->kma.param.cv.w + 1;
            s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);
        }

        n = ctx->mreslen;
        for (i = 0; i < rem; i++) {
            ctx->mres[n + i] = in[i];
            out[i] = in[i] ^ ctx->kres[n + i];
        }

        ctx->mreslen += rem;
    }
    return 0;
}

/*-
 * Initialize context structure. Code is big-endian.
 */
static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx,
                                const unsigned char *iv)
{
    ctx->kma.param.t.g[0] = 0;
    ctx->kma.param.t.g[1] = 0;
    ctx->kma.param.tpcl = 0;
    ctx->kma.param.taadl = 0;
    ctx->mreslen = 0;
    ctx->areslen = 0;
    ctx->kreslen = 0;

    if (ctx->ivlen == 12) {
        memcpy(&ctx->kma.param.j0, iv, ctx->ivlen);
        ctx->kma.param.j0.w[3] = 1;
        ctx->kma.param.cv.w = 1;
    } else {
        /* ctx->iv has the right size and is already padded. */
        memcpy(ctx->iv, iv, ctx->ivlen);
        s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
                  ctx->fc, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;

        ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
        ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
        ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
        ctx->kma.param.t.g[0] = 0;
        ctx->kma.param.t.g[1] = 0;
    }
}

/*-
 * Performs various operations on the context structure depending on control
 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
 * Code is big-endian.
 */
static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
    S390X_AES_GCM_CTX *gctx_out;
    EVP_CIPHER_CTX *out;
    unsigned char *buf;
    int ivlen, enc, len;

    switch (type) {
    case EVP_CTRL_INIT:
        ivlen = EVP_CIPHER_iv_length(c->cipher);
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = ivlen;
        gctx->iv = c->iv;
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = gctx->ivlen;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        if (arg <= 0)
            return 0;

        if (arg != 12) {
            len = S390X_gcm_ivpadlen(arg);

            /* Allocate memory for iv if needed. */
            if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
                if (gctx->iv != c->iv)
                    OPENSSL_free(gctx->iv);

                if ((gctx->iv = OPENSSL_malloc(len)) == NULL) {
                    ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
                    return 0;
                }
            }
            /* Add padding. */
            memset(gctx->iv + arg, 0, len - arg - 8);
            *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        buf = EVP_CIPHER_CTX_buf_noconst(c);
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (arg <= 0 || arg > 16 || enc)
            return 0;

        memcpy(buf, ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
            return 0;

        memcpy(ptr, gctx->kma.param.t.b, arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole iv */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;

        if (arg)
            memcpy(gctx->iv, ptr, arg);

        enc = EVP_CIPHER_CTX_encrypting(c);
        if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;

        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;

        s390x_aes_gcm_setiv(gctx, gctx->iv);

        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;

        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
            return 0;

        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        s390x_aes_gcm_setiv(gctx, gctx->iv);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the aad for later use. */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;

        buf = EVP_CIPHER_CTX_buf_noconst(c);
        memcpy(buf, ptr, arg);
        gctx->tls_aad_len = arg;
        gctx->tls_enc_records = 0;

        len = buf[arg - 2] << 8 | buf[arg - 1];
        /* Correct length for explicit iv. */
        if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
            return 0;
        len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;

        /* If decrypting correct for tag too. */
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (!enc) {
            if (len < EVP_GCM_TLS_TAG_LEN)
                return 0;
            len -= EVP_GCM_TLS_TAG_LEN;
        }
        buf[arg - 2] = len >> 8;
        buf[arg - 1] = len & 0xff;
        /* Extra padding: tag appended to record. */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        out = ptr;
        gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);

        if (gctx->iv == c->iv) {
            gctx_out->iv = out->iv;
        } else {
            len = S390X_gcm_ivpadlen(gctx->ivlen);

            if ((gctx_out->iv = OPENSSL_malloc(len)) == NULL) {
                ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
                return 0;
            }

            memcpy(gctx_out->iv, gctx->iv, len);
        }
        return 1;

    default:
        return -1;
    }
}

/*-
 * Set key and/or iv. Returns 1 on success. Otherwise 0 is returned.
 */
static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    int keylen;

    if (iv == NULL && key == NULL)
        return 1;

    if (key != NULL) {
        keylen = EVP_CIPHER_CTX_key_length(ctx);
        memcpy(&gctx->kma.param.k, key, keylen);

        gctx->fc = S390X_AES_FC(keylen);
        if (!enc)
            gctx->fc |= S390X_DECRYPT;

        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;

        if (iv != NULL) {
            s390x_aes_gcm_setiv(gctx, iv);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        if (gctx->key_set)
            s390x_aes_gcm_setiv(gctx, iv);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);

        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

/*-
 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
 * if successful. Otherwise -1 is returned. Code is big-endian.
 */
static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                    const unsigned char *in, size_t len)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);
    int rv = -1;

    if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;

    /*
     * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
     * Requirements from SP 800-38D". The requirement is for one party to the
     * communication to fail after 2^64 - 1 keys. We do this on the encrypting
     * side only.
     */
    if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_TOO_MANY_RECORDS);
        goto err;
    }

    if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN
                                     : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;

    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;

    gctx->kma.param.taadl = gctx->tls_aad_len << 3;
    gctx->kma.param.tpcl = len << 3;
    s390x_kma(buf, gctx->tls_aad_len, in, len, out,
              gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);

    if (enc) {
        memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
                          EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }
err:
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}

/*-
 * Called from EVP layer to initialize context, process additional
 * authenticated data, en/de-crypt plain/cipher-text and authenticate
 * ciphertext or process a TLS packet, depending on context. Returns bytes
 * written on success. Otherwise -1 is returned. Code is big-endian.
 */
static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    unsigned char *buf, tmp[16];
    int enc;

    if (!gctx->key_set)
        return -1;

    if (gctx->tls_aad_len >= 0)
        return s390x_aes_gcm_tls_cipher(ctx, out, in, len);

    if (!gctx->iv_set)
        return -1;

    if (in != NULL) {
        if (out == NULL) {
            if (s390x_aes_gcm_aad(gctx, in, len))
                return -1;
        } else {
            if (s390x_aes_gcm(gctx, in, out, len))
                return -1;
        }
        return len;
    } else {
        gctx->kma.param.taadl <<= 3;
        gctx->kma.param.tpcl <<= 3;
        s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
                  gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
        /* recall that we already did en-/decrypt gctx->mres
         * and returned it to caller... */
        OPENSSL_cleanse(tmp, gctx->mreslen);
        gctx->iv_set = 0;

        enc = EVP_CIPHER_CTX_encrypting(ctx);
        if (enc) {
            gctx->taglen = 16;
        } else {
            if (gctx->taglen < 0)
                return -1;

            buf = EVP_CIPHER_CTX_buf_noconst(ctx);
            if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
                return -1;
        }
        return 0;
    }
}

static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);

    if (gctx == NULL)
        return 0;

    if (gctx->iv != c->iv)
        OPENSSL_free(gctx->iv);

    OPENSSL_cleanse(gctx, sizeof(*gctx));
    return 1;
}

# define S390X_AES_XTS_CTX EVP_AES_XTS_CTX

# define s390x_aes_xts_init_key aes_xts_init_key
static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc);
# define s390x_aes_xts_cipher aes_xts_cipher
static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
# define s390x_aes_xts_ctrl aes_xts_ctrl
static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
# define s390x_aes_xts_cleanup aes_xts_cleanup

/*-
 * Set nonce and length fields. Code is big-endian.
 */
static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
                                       const unsigned char *nonce,
                                       size_t mlen)
{
    ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
    ctx->aes.ccm.nonce.g[1] = mlen;
    memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);
}

/*-
 * Process additional authenticated data. Code is big-endian.
 */
static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,
                              size_t alen)
{
    unsigned char *ptr;
    int i, rem;

    if (!alen)
        return;

    ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;

    /* Suppress 'type-punned pointer dereference' warning. */
    ptr = ctx->aes.ccm.buf.b;

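    /*
     * Encode the AAD length as per RFC 3610 / SP 800-38C: lengths below
     * 0xff00 use a 2-byte field, lengths that need more than 32 bits use
     * 0xffff plus an 8-byte field, anything else uses 0xfffe plus a 4-byte
     * field.  The encoding shares the first KMAC block with the start of
     * the AAD itself.
     */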
1717 if (alen < ((1 << 16) - (1 << 8))) {
1718 *(uint16_t *)ptr = alen;
1719 i = 2;
1720 } else if (sizeof(alen) == 8
1721 && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) {
1722 *(uint16_t *)ptr = 0xffff;
1723 *(uint64_t *)(ptr + 2) = alen;
1724 i = 10;
1725 } else {
1726 *(uint16_t *)ptr = 0xfffe;
1727 *(uint32_t *)(ptr + 2) = alen;
1728 i = 6;
1729 }
1730
1731 while (i < 16 && alen) {
1732 ctx->aes.ccm.buf.b[i] = *aad;
1733 ++aad;
1734 --alen;
1735 ++i;
1736 }
1737 while (i < 16) {
1738 ctx->aes.ccm.buf.b[i] = 0;
1739 ++i;
1740 }
1741
1742 ctx->aes.ccm.kmac_param.icv.g[0] = 0;
1743 ctx->aes.ccm.kmac_param.icv.g[1] = 0;
1744 s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc,
1745 &ctx->aes.ccm.kmac_param);
1746 ctx->aes.ccm.blocks += 2;
1747
1748 rem = alen & 0xf;
25868993 1749 alen &= ~(size_t)0xf;
39f5b069
PS
1750 if (alen) {
1751 s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1752 ctx->aes.ccm.blocks += alen >> 4;
1753 aad += alen;
1754 }
1755 if (rem) {
1756 for (i = 0; i < rem; i++)
1757 ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i];
1758
1759 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1760 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1761 ctx->aes.ccm.kmac_param.k);
1762 ctx->aes.ccm.blocks++;
1763 }
1764}
1765
1766/*-
1767 * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for
1768 * success.
1769 */
1770static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in,
1771 unsigned char *out, size_t len, int enc)
1772{
1773 size_t n, rem;
1774 unsigned int i, l, num;
1775 unsigned char flags;
1776
1777 flags = ctx->aes.ccm.nonce.b[0];
1778 if (!(flags & S390X_CCM_AAD_FLAG)) {
1779 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b,
1780 ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k);
1781 ctx->aes.ccm.blocks++;
1782 }
1783 l = flags & 0x7;
1784 ctx->aes.ccm.nonce.b[0] = l;
1785
1786 /*-
1787 * Reconstruct length from encoded length field
1788 * and initialize it with counter value.
1789 */
1790 n = 0;
1791 for (i = 15 - l; i < 15; i++) {
1792 n |= ctx->aes.ccm.nonce.b[i];
1793 ctx->aes.ccm.nonce.b[i] = 0;
1794 n <<= 8;
1795 }
1796 n |= ctx->aes.ccm.nonce.b[15];
1797 ctx->aes.ccm.nonce.b[15] = 1;
1798
1799 if (n != len)
dd6b2706 1800 return -1; /* length mismatch */
39f5b069
PS
1801
1802 if (enc) {
1803 /* Two operations per block plus one for tag encryption */
1804 ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1;
1805 if (ctx->aes.ccm.blocks > (1ULL << 61))
dd6b2706 1806 return -2; /* too much data */
39f5b069
PS
1807 }
1808
1809 num = 0;
1810 rem = len & 0xf;
25868993 1811 len &= ~(size_t)0xf;
39f5b069
PS
1812
1813 if (enc) {
1814 /* mac-then-encrypt */
1815 if (len)
1816 s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1817 if (rem) {
1818 for (i = 0; i < rem; i++)
1819 ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i];
1820
1821 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1822 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1823 ctx->aes.ccm.kmac_param.k);
1824 }
1825
1826 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
1827 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
1828 &num, (ctr128_f)AES_ctr32_encrypt);
1829 } else {
1830 /* decrypt-then-mac */
1831 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
1832 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
1833 &num, (ctr128_f)AES_ctr32_encrypt);
1834
1835 if (len)
1836 s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1837 if (rem) {
1838 for (i = 0; i < rem; i++)
1839 ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i];
1840
1841 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1842 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1843 ctx->aes.ccm.kmac_param.k);
1844 }
1845 }
1846 /* encrypt tag */
1847 for (i = 15 - l; i < 16; i++)
1848 ctx->aes.ccm.nonce.b[i] = 0;
1849
1850 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc,
1851 ctx->aes.ccm.kmac_param.k);
1852 ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0];
1853 ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1];
1854
dd6b2706 1855 ctx->aes.ccm.nonce.b[0] = flags; /* restore flags field */
39f5b069
PS
1856 return 0;
1857}
1858
1859/*-
1860 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1861 * if successful. Otherwise -1 is returned.
1862 */
1863static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1864 const unsigned char *in, size_t len)
1865{
1866 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
9197c226 1867 unsigned char *ivec = ctx->iv;
39f5b069
PS
1868 unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1869 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
1870
1871 if (out != in
1872 || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m))
1873 return -1;
1874
1875 if (enc) {
1876 /* Set explicit iv (sequence number). */
1877 memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN);
1878 }
1879
1880 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
1881 /*-
1882 * Get explicit iv (sequence number). We already have fixed iv
1883 * (server/client_write_iv) here.
1884 */
1885 memcpy(ivec + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN);
1886 s390x_aes_ccm_setiv(cctx, ivec, len);
1887
1888 /* Process aad (sequence number|type|version|length) */
1889 s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len);
1890
1891 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
1892 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
96530eea 1893
39f5b069
PS
1894 if (enc) {
1895 if (s390x_aes_ccm(cctx, in, out, len, enc))
1896 return -1;
1897
1898 memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
1899 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
1900 } else {
1901 if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
1902 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len,
1903 cctx->aes.ccm.m))
1904 return len;
1905 }
1906
1907 OPENSSL_cleanse(out, len);
1908 return -1;
1909 }
1910}
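/*-
 * For reference, the record layout handled above is
 *
 *     explicit IV (EVP_CCM_TLS_EXPLICIT_IV_LEN = 8 bytes, the sequence number)
 *  || ciphertext  (len bytes)
 *  || tag         (cctx->aes.ccm.m bytes)
 *
 * which matches the encrypt-path return value of
 * len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m.
 */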
1911
1912/*-
1913 * Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is
1914 * returned.
1915 */
96530eea
PS
1916static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
1917 const unsigned char *key,
39f5b069
PS
1918 const unsigned char *iv, int enc)
1919{
1920 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
39f5b069
PS
1921 int keylen;
1922
1923 if (iv == NULL && key == NULL)
1924 return 1;
1925
1926 if (key != NULL) {
1927 keylen = EVP_CIPHER_CTX_key_length(ctx);
8eb399fb 1928 cctx->aes.ccm.fc = S390X_AES_FC(keylen);
39f5b069
PS
1929 memcpy(cctx->aes.ccm.kmac_param.k, key, keylen);
1930
1931 /* Store encoded m and l. */
1932 cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7)
1933 | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3;
1934 memset(cctx->aes.ccm.nonce.b + 1, 0,
1935 sizeof(cctx->aes.ccm.nonce.b) - 1);
1936 cctx->aes.ccm.blocks = 0;
1937
1938 cctx->aes.ccm.key_set = 1;
1939 }
1940
1941 if (iv != NULL) {
9197c226 1942 memcpy(ctx->iv, iv, 15 - cctx->aes.ccm.l);
39f5b069
PS
1943
1944 cctx->aes.ccm.iv_set = 1;
1945 }
1946
1947 return 1;
1948}
1949
1950/*-
1951 * Called from EVP layer to initialize context, process additional
1952 * authenticated data, en/de-crypt plain/cipher-text and authenticate
1953 * plaintext or process a TLS packet, depending on context. Returns bytes
1954 * written on success. Otherwise -1 is returned.
1955 */
96530eea 1956static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
39f5b069
PS
1957 const unsigned char *in, size_t len)
1958{
1959 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
1960 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
1961 int rv;
9197c226 1962 unsigned char *buf;
39f5b069
PS
1963
1964 if (!cctx->aes.ccm.key_set)
1965 return -1;
1966
1967 if (cctx->aes.ccm.tls_aad_len >= 0)
1968 return s390x_aes_ccm_tls_cipher(ctx, out, in, len);
1969
1970 /*-
1971 * Final(): Does not return any data. Recall that ccm is mac-then-encrypt
1972 * so integrity must be checked already at Update() i.e., before
1973 * potentially corrupted data is output.
1974 */
1975 if (in == NULL && out != NULL)
1976 return 0;
1977
1978 if (!cctx->aes.ccm.iv_set)
1979 return -1;
1980
39f5b069
PS
1981 if (out == NULL) {
1982 /* Update(): Pass message length. */
1983 if (in == NULL) {
9197c226 1984 s390x_aes_ccm_setiv(cctx, ctx->iv, len);
39f5b069
PS
1985
1986 cctx->aes.ccm.len_set = 1;
1987 return len;
1988 }
1989
1990 /* Update(): Process aad. */
1991 if (!cctx->aes.ccm.len_set && len)
1992 return -1;
1993
1994 s390x_aes_ccm_aad(cctx, in, len);
1995 return len;
1996 }
1997
887e22dd
PS
1998 /* The tag must be set before actually decrypting data */
1999 if (!enc && !cctx->aes.ccm.tag_set)
2000 return -1;
2001
39f5b069
PS
2002 /* Update(): Process message. */
2003
2004 if (!cctx->aes.ccm.len_set) {
2005 /*-
46d08509 2006 * In case message length was not previously set explicitly via
39f5b069
PS
2007 * Update(), set it now.
2008 */
9197c226 2009 s390x_aes_ccm_setiv(cctx, ctx->iv, len);
39f5b069
PS
2010
2011 cctx->aes.ccm.len_set = 1;
2012 }
2013
2014 if (enc) {
2015 if (s390x_aes_ccm(cctx, in, out, len, enc))
2016 return -1;
2017
2018 cctx->aes.ccm.tag_set = 1;
2019 return len;
2020 } else {
2021 rv = -1;
2022
2023 if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
2024 buf = EVP_CIPHER_CTX_buf_noconst(ctx);
2025 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf,
2026 cctx->aes.ccm.m))
2027 rv = len;
2028 }
2029
2030 if (rv == -1)
2031 OPENSSL_cleanse(out, len);
2032
2033 cctx->aes.ccm.iv_set = 0;
2034 cctx->aes.ccm.tag_set = 0;
2035 cctx->aes.ccm.len_set = 0;
2036 return rv;
2037 }
2038}
2039
2040/*-
2041 * Performs various operations on the context structure depending on control
2042 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
2043 * The code assumes a big-endian platform (as s390x is).
2044 */
2045static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2046{
2047 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c);
9197c226 2048 unsigned char *buf;
39f5b069
PS
2049 int enc, len;
2050
2051 switch (type) {
2052 case EVP_CTRL_INIT:
2053 cctx->aes.ccm.key_set = 0;
2054 cctx->aes.ccm.iv_set = 0;
2055 cctx->aes.ccm.l = 8;
2056 cctx->aes.ccm.m = 12;
2057 cctx->aes.ccm.tag_set = 0;
2058 cctx->aes.ccm.len_set = 0;
2059 cctx->aes.ccm.tls_aad_len = -1;
2060 return 1;
2061
7dddf2fc
SL
2062 case EVP_CTRL_GET_IVLEN:
2063 *(int *)ptr = 15 - cctx->aes.ccm.l;
2064 return 1;
2065
39f5b069
PS
2066 case EVP_CTRL_AEAD_TLS1_AAD:
2067 if (arg != EVP_AEAD_TLS1_AAD_LEN)
2068 return 0;
2069
2070 /* Save the aad for later use. */
2071 buf = EVP_CIPHER_CTX_buf_noconst(c);
2072 memcpy(buf, ptr, arg);
2073 cctx->aes.ccm.tls_aad_len = arg;
2074
03a5e5ae 2075 len = buf[arg - 2] << 8 | buf[arg - 1];
39f5b069
PS
2076 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
2077 return 0;
2078
2079 /* Correct length for explicit iv. */
2080 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
2081
2082 enc = EVP_CIPHER_CTX_encrypting(c);
2083 if (!enc) {
2084 if (len < cctx->aes.ccm.m)
2085 return 0;
2086
2087 /* Correct length for tag. */
2088 len -= cctx->aes.ccm.m;
2089 }
2090
03a5e5ae
PS
2091 buf[arg - 2] = len >> 8;
2092 buf[arg - 1] = len & 0xff;
2093
39f5b069
PS
2094 /* Extra padding: tag appended to record. */
2095 return cctx->aes.ccm.m;
2096
2097 case EVP_CTRL_CCM_SET_IV_FIXED:
2098 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
2099 return 0;
2100
2101 /* Copy to first part of the iv. */
9197c226 2102 memcpy(c->iv, ptr, arg);
39f5b069
PS
2103 return 1;
2104
2105 case EVP_CTRL_AEAD_SET_IVLEN:
2106 arg = 15 - arg;
2107 /* fall-through */
2108
2109 case EVP_CTRL_CCM_SET_L:
2110 if (arg < 2 || arg > 8)
2111 return 0;
2112
2113 cctx->aes.ccm.l = arg;
2114 return 1;
2115
2116 case EVP_CTRL_AEAD_SET_TAG:
2117 if ((arg & 1) || arg < 4 || arg > 16)
2118 return 0;
2119
2120 enc = EVP_CIPHER_CTX_encrypting(c);
2121 if (enc && ptr)
2122 return 0;
2123
2124 if (ptr) {
2125 cctx->aes.ccm.tag_set = 1;
2126 buf = EVP_CIPHER_CTX_buf_noconst(c);
2127 memcpy(buf, ptr, arg);
2128 }
2129
2130 cctx->aes.ccm.m = arg;
2131 return 1;
2132
2133 case EVP_CTRL_AEAD_GET_TAG:
2134 enc = EVP_CIPHER_CTX_encrypting(c);
2135 if (!enc || !cctx->aes.ccm.tag_set)
2136 return 0;
2137
2138 if (arg < cctx->aes.ccm.m)
2139 return 0;
2140
2141 memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
2142 cctx->aes.ccm.tag_set = 0;
2143 cctx->aes.ccm.iv_set = 0;
2144 cctx->aes.ccm.len_set = 0;
2145 return 1;
2146
2147 case EVP_CTRL_COPY:
2148 return 1;
2149
2150 default:
2151 return -1;
2152 }
2153}
2154
96530eea
PS
2155# define s390x_aes_ccm_cleanup aes_ccm_cleanup
2156
2157# ifndef OPENSSL_NO_OCB
dd6b2706 2158# define S390X_AES_OCB_CTX EVP_AES_OCB_CTX
96530eea
PS
2159
2160# define s390x_aes_ocb_init_key aes_ocb_init_key
2161static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2162 const unsigned char *iv, int enc);
2163# define s390x_aes_ocb_cipher aes_ocb_cipher
2164static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2165 const unsigned char *in, size_t len);
2166# define s390x_aes_ocb_cleanup aes_ocb_cleanup
2167static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *);
2168# define s390x_aes_ocb_ctrl aes_ocb_ctrl
2169static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
2170# endif
2171
e74be3d4
RL
2172# ifndef OPENSSL_NO_SIV
2173# define S390X_AES_SIV_CTX EVP_AES_SIV_CTX
e74be3d4
RL
2174
2175# define s390x_aes_siv_init_key aes_siv_init_key
2176# define s390x_aes_siv_cipher aes_siv_cipher
2177# define s390x_aes_siv_cleanup aes_siv_cleanup
2178# define s390x_aes_siv_ctrl aes_siv_ctrl
2179# endif
2180
dd6b2706
P
2181# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode, \
2182 MODE,flags) \
2183static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2184 nid##_##keylen##_##nmode,blocksize, \
2185 keylen / 8, \
2186 ivlen, \
2187 flags | EVP_CIPH_##MODE##_MODE, \
2188 s390x_aes_##mode##_init_key, \
2189 s390x_aes_##mode##_cipher, \
2190 NULL, \
2191 sizeof(S390X_AES_##MODE##_CTX), \
2192 NULL, \
2193 NULL, \
2194 NULL, \
2195 NULL \
2196}; \
2197static const EVP_CIPHER aes_##keylen##_##mode = { \
2198 nid##_##keylen##_##nmode, \
2199 blocksize, \
2200 keylen / 8, \
2201 ivlen, \
2202 flags | EVP_CIPH_##MODE##_MODE, \
2203 aes_init_key, \
2204 aes_##mode##_cipher, \
2205 NULL, \
2206 sizeof(EVP_AES_KEY), \
2207 NULL, \
2208 NULL, \
2209 NULL, \
2210 NULL \
2211}; \
2212const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2213{ \
2214 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2215 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
96530eea
PS
2216}
2217
2218# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\
dd6b2706
P
2219static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2220 nid##_##keylen##_##mode, \
2221 blocksize, \
2222 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
2223 ivlen, \
2224 flags | EVP_CIPH_##MODE##_MODE, \
2225 s390x_aes_##mode##_init_key, \
2226 s390x_aes_##mode##_cipher, \
2227 s390x_aes_##mode##_cleanup, \
2228 sizeof(S390X_AES_##MODE##_CTX), \
2229 NULL, \
2230 NULL, \
2231 s390x_aes_##mode##_ctrl, \
2232 NULL \
2233}; \
2234static const EVP_CIPHER aes_##keylen##_##mode = { \
2235 nid##_##keylen##_##mode,blocksize, \
2236 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
2237 ivlen, \
2238 flags | EVP_CIPH_##MODE##_MODE, \
2239 aes_##mode##_init_key, \
2240 aes_##mode##_cipher, \
2241 aes_##mode##_cleanup, \
2242 sizeof(EVP_AES_##MODE##_CTX), \
2243 NULL, \
2244 NULL, \
2245 aes_##mode##_ctrl, \
2246 NULL \
2247}; \
2248const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2249{ \
2250 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2251 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
96530eea
PS
2252}
2253
5158c763 2254#else
17f121de 2255
5158c763 2256# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
17f121de 2257static const EVP_CIPHER aes_##keylen##_##mode = { \
0f113f3e
MC
2258 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
2259 flags|EVP_CIPH_##MODE##_MODE, \
f6c95e46 2260 EVP_ORIG_GLOBAL, \
0f113f3e
MC
2261 aes_init_key, \
2262 aes_##mode##_cipher, \
2263 NULL, \
2264 sizeof(EVP_AES_KEY), \
2265 NULL,NULL,NULL,NULL }; \
17f121de
AP
2266const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2267{ return &aes_##keylen##_##mode; }
d1fff483 2268
5158c763 2269# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
17f121de 2270static const EVP_CIPHER aes_##keylen##_##mode = { \
0f113f3e 2271 nid##_##keylen##_##mode,blocksize, \
b1ceb439
TS
2272 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
2273 ivlen, \
0f113f3e 2274 flags|EVP_CIPH_##MODE##_MODE, \
f6c95e46 2275 EVP_ORIG_GLOBAL, \
0f113f3e
MC
2276 aes_##mode##_init_key, \
2277 aes_##mode##_cipher, \
2278 aes_##mode##_cleanup, \
2279 sizeof(EVP_AES_##MODE##_CTX), \
2280 NULL,NULL,aes_##mode##_ctrl,NULL }; \
17f121de
AP
2281const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2282{ return &aes_##keylen##_##mode; }
9575d1a9 2283
5158c763 2284#endif
9575d1a9 2285
5158c763 2286#define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \
0f113f3e
MC
2287 BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2288 BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2289 BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2290 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2291 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags) \
2292 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags) \
2293 BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
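/*-
 * For reference, each BLOCK_CIPHER_generic_pack() invocation below expands
 * the macro above once per block mode, so the 128-bit pack ends up providing
 * the public accessors EVP_aes_128_cbc(), EVP_aes_128_ecb(), EVP_aes_128_ctr()
 * and the OFB/CFB variants, each returning the EVP_CIPHER table selected by
 * BLOCK_CIPHER_generic() for this build.
 */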
d1fff483
AP
2294
2295static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
2296 const unsigned char *iv, int enc)
2297{
2298 int ret, mode;
6435f0f6 2299 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
0f113f3e 2300
6435f0f6 2301 mode = EVP_CIPHER_CTX_mode(ctx);
0f113f3e 2302 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
c01a3c6d 2303 && !enc) {
5158c763 2304#ifdef HWAES_CAPABLE
0f113f3e 2305 if (HWAES_CAPABLE) {
6435f0f6
RL
2306 ret = HWAES_set_decrypt_key(key,
2307 EVP_CIPHER_CTX_key_length(ctx) * 8,
2308 &dat->ks.ks);
0f113f3e
MC
2309 dat->block = (block128_f) HWAES_decrypt;
2310 dat->stream.cbc = NULL;
5158c763 2311# ifdef HWAES_cbc_encrypt
0f113f3e
MC
2312 if (mode == EVP_CIPH_CBC_MODE)
2313 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
0f113f3e 2314# endif
5158c763
MC
2315 } else
2316#endif
2317#ifdef BSAES_CAPABLE
0f113f3e 2318 if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
6435f0f6
RL
2319 ret = AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2320 &dat->ks.ks);
0f113f3e
MC
2321 dat->block = (block128_f) AES_decrypt;
2322 dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
2323 } else
5158c763
MC
2324#endif
2325#ifdef VPAES_CAPABLE
0f113f3e 2326 if (VPAES_CAPABLE) {
6435f0f6
RL
2327 ret = vpaes_set_decrypt_key(key,
2328 EVP_CIPHER_CTX_key_length(ctx) * 8,
2329 &dat->ks.ks);
0f113f3e
MC
2330 dat->block = (block128_f) vpaes_decrypt;
2331 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2332 (cbc128_f) vpaes_cbc_encrypt : NULL;
2333 } else
5158c763 2334#endif
0f113f3e 2335 {
6435f0f6
RL
2336 ret = AES_set_decrypt_key(key,
2337 EVP_CIPHER_CTX_key_length(ctx) * 8,
2338 &dat->ks.ks);
0f113f3e
MC
2339 dat->block = (block128_f) AES_decrypt;
2340 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2341 (cbc128_f) AES_cbc_encrypt : NULL;
c01a3c6d 2342 }
0f113f3e 2343 } else
5158c763 2344#ifdef HWAES_CAPABLE
0f113f3e 2345 if (HWAES_CAPABLE) {
6435f0f6
RL
2346 ret = HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2347 &dat->ks.ks);
0f113f3e
MC
2348 dat->block = (block128_f) HWAES_encrypt;
2349 dat->stream.cbc = NULL;
5158c763 2350# ifdef HWAES_cbc_encrypt
0f113f3e
MC
2351 if (mode == EVP_CIPH_CBC_MODE)
2352 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2353 else
5158c763
MC
2354# endif
2355# ifdef HWAES_ctr32_encrypt_blocks
0f113f3e
MC
2356 if (mode == EVP_CIPH_CTR_MODE)
2357 dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
2358 else
5158c763 2359# endif
0f113f3e
MC
2360 (void)0; /* terminate potentially open 'else' */
2361 } else
5158c763
MC
2362#endif
2363#ifdef BSAES_CAPABLE
0f113f3e 2364 if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
6435f0f6
RL
2365 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2366 &dat->ks.ks);
0f113f3e
MC
2367 dat->block = (block128_f) AES_encrypt;
2368 dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
2369 } else
5158c763
MC
2370#endif
2371#ifdef VPAES_CAPABLE
0f113f3e 2372 if (VPAES_CAPABLE) {
6435f0f6
RL
2373 ret = vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2374 &dat->ks.ks);
0f113f3e
MC
2375 dat->block = (block128_f) vpaes_encrypt;
2376 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2377 (cbc128_f) vpaes_cbc_encrypt : NULL;
2378 } else
5158c763 2379#endif
0f113f3e 2380 {
6435f0f6
RL
2381 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2382 &dat->ks.ks);
0f113f3e
MC
2383 dat->block = (block128_f) AES_encrypt;
2384 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2385 (cbc128_f) AES_cbc_encrypt : NULL;
5158c763 2386#ifdef AES_CTR_ASM
0f113f3e
MC
2387 if (mode == EVP_CIPH_CTR_MODE)
2388 dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
5158c763 2389#endif
0f113f3e 2390 }
d1fff483 2391
0f113f3e 2392 if (ret < 0) {
9311d0c4 2393 ERR_raise(ERR_LIB_EVP, EVP_R_AES_KEY_SETUP_FAILED);
0f113f3e
MC
2394 return 0;
2395 }
d1fff483 2396
0f113f3e
MC
2397 return 1;
2398}
d1fff483 2399
0f113f3e
MC
2400static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2401 const unsigned char *in, size_t len)
17f121de 2402{
6435f0f6 2403 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
8ca28da0 2404
0f113f3e 2405 if (dat->stream.cbc)
9197c226 2406 (*dat->stream.cbc) (in, out, len, &dat->ks, ctx->iv,
6435f0f6
RL
2407 EVP_CIPHER_CTX_encrypting(ctx));
2408 else if (EVP_CIPHER_CTX_encrypting(ctx))
9197c226
BK
2409 CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, ctx->iv,
2410 dat->block);
0f113f3e 2411 else
6435f0f6 2412 CRYPTO_cbc128_decrypt(in, out, len, &dat->ks,
9197c226 2413 ctx->iv, dat->block);
17f121de 2414
0f113f3e 2415 return 1;
17f121de
AP
2416}
2417
0f113f3e
MC
2418static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2419 const unsigned char *in, size_t len)
17f121de 2420{
6435f0f6 2421 size_t bl = EVP_CIPHER_CTX_block_size(ctx);
0f113f3e 2422 size_t i;
6435f0f6 2423 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
17f121de 2424
0f113f3e
MC
2425 if (len < bl)
2426 return 1;
17f121de 2427
0f113f3e
MC
2428 for (i = 0, len -= bl; i <= len; i += bl)
2429 (*dat->block) (in + i, out + i, &dat->ks);
17f121de 2430
0f113f3e 2431 return 1;
17f121de 2432}
deb2c1a1 2433
0f113f3e
MC
2434static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2435 const unsigned char *in, size_t len)
17f121de 2436{
6435f0f6 2437 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
8ca28da0 2438
6435f0f6 2439 int num = EVP_CIPHER_CTX_num(ctx);
0f113f3e 2440 CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
9197c226 2441 ctx->iv, &num, dat->block);
6435f0f6 2442 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e 2443 return 1;
17f121de 2444}
deb2c1a1 2445
0f113f3e
MC
2446static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2447 const unsigned char *in, size_t len)
17f121de 2448{
6435f0f6 2449 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
8ca28da0 2450
6435f0f6 2451 int num = EVP_CIPHER_CTX_num(ctx);
0f113f3e 2452 CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
9197c226 2453 ctx->iv, &num,
6435f0f6
RL
2454 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2455 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e 2456 return 1;
17f121de
AP
2457}
2458
0f113f3e
MC
2459static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2460 const unsigned char *in, size_t len)
17f121de 2461{
6435f0f6 2462 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
8ca28da0 2463
6435f0f6 2464 int num = EVP_CIPHER_CTX_num(ctx);
0f113f3e 2465 CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
9197c226 2466 ctx->iv, &num,
6435f0f6
RL
2467 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2468 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e 2469 return 1;
17f121de 2470}
8d1ebe0b 2471
0f113f3e
MC
2472static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2473 const unsigned char *in, size_t len)
17f121de 2474{
6435f0f6 2475 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
0f113f3e 2476
6435f0f6
RL
2477 if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
2478 int num = EVP_CIPHER_CTX_num(ctx);
0f113f3e 2479 CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
9197c226 2480 ctx->iv, &num,
6435f0f6
RL
2481 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2482 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e
MC
2483 return 1;
2484 }
2485
2486 while (len >= MAXBITCHUNK) {
6435f0f6 2487 int num = EVP_CIPHER_CTX_num(ctx);
0f113f3e 2488 CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
9197c226 2489 ctx->iv, &num,
6435f0f6
RL
2490 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2491 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e 2492 len -= MAXBITCHUNK;
604e591e
BE
2493 out += MAXBITCHUNK;
2494 in += MAXBITCHUNK;
0f113f3e 2495 }
6435f0f6
RL
2496 if (len) {
2497 int num = EVP_CIPHER_CTX_num(ctx);
0f113f3e 2498 CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
9197c226 2499 ctx->iv, &num,
6435f0f6
RL
2500 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2501 EVP_CIPHER_CTX_set_num(ctx, num);
2502 }
0f113f3e
MC
2503
2504 return 1;
17f121de 2505}
8d1ebe0b 2506
0f113f3e
MC
2507static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2508 const unsigned char *in, size_t len)
d976f992 2509{
6435f0f6
RL
2510 unsigned int num = EVP_CIPHER_CTX_num(ctx);
2511 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
0f113f3e
MC
2512
2513 if (dat->stream.ctr)
2514 CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
9197c226 2515 ctx->iv,
6435f0f6
RL
2516 EVP_CIPHER_CTX_buf_noconst(ctx),
2517 &num, dat->stream.ctr);
0f113f3e
MC
2518 else
2519 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
9197c226 2520 ctx->iv,
6435f0f6
RL
2521 EVP_CIPHER_CTX_buf_noconst(ctx), &num,
2522 dat->block);
2523 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e 2524 return 1;
d976f992
AP
2525}
2526
0f113f3e
MC
2527BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
2528 BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
2529 BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
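/*-
 * A minimal usage sketch of the generic ciphers instantiated above, assuming
 * caller-supplied buffers of the right size (16-byte key and IV for
 * AES-128-CBC) and with error handling compressed; the helper name is
 * illustrative only.
 */
static int example_aes_128_cbc_encrypt(const unsigned char *key,
                                       const unsigned char *iv,
                                       const unsigned char *pt, int ptlen,
                                       unsigned char *ct, int *ctlen)
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int outl = 0, tmplen = 0, ok = 0;

    if (c == NULL)
        return 0;
    if (EVP_EncryptInit_ex(c, EVP_aes_128_cbc(), NULL, key, iv)
            && EVP_EncryptUpdate(c, ct, &outl, pt, ptlen)
            && EVP_EncryptFinal_ex(c, ct + outl, &tmplen)) {
        *ctlen = outl + tmplen;
        ok = 1;
    }
    EVP_CIPHER_CTX_free(c);
    return ok;
}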
bdaa5415
DSH
2530
2531static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
0f113f3e 2532{
6435f0f6 2533 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
273a0218
BE
2534 if (gctx == NULL)
2535 return 0;
0f113f3e 2536 OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
9197c226 2537 if (gctx->iv != c->iv)
0f113f3e
MC
2538 OPENSSL_free(gctx->iv);
2539 return 1;
2540}
bdaa5415
DSH
2541
2542static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
0f113f3e 2543{
6435f0f6 2544 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
0f113f3e
MC
2545 switch (type) {
2546 case EVP_CTRL_INIT:
2547 gctx->key_set = 0;
2548 gctx->iv_set = 0;
7dddf2fc 2549 gctx->ivlen = EVP_CIPHER_iv_length(c->cipher);
c5307d9c 2550 gctx->iv = c->iv;
0f113f3e
MC
2551 gctx->taglen = -1;
2552 gctx->iv_gen = 0;
2553 gctx->tls_aad_len = -1;
2554 return 1;
2555
7dddf2fc
SL
2556 case EVP_CTRL_GET_IVLEN:
2557 *(int *)ptr = gctx->ivlen;
2558 return 1;
2559
e640fa02 2560 case EVP_CTRL_AEAD_SET_IVLEN:
0f113f3e
MC
2561 if (arg <= 0)
2562 return 0;
2563 /* Allocate memory for IV if needed */
2564 if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
c5307d9c 2565 if (gctx->iv != c->iv)
0f113f3e 2566 OPENSSL_free(gctx->iv);
cdb10bae 2567 if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
9311d0c4 2568 ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
0f113f3e 2569 return 0;
cdb10bae 2570 }
0f113f3e
MC
2571 }
2572 gctx->ivlen = arg;
2573 return 1;
2574
e640fa02 2575 case EVP_CTRL_AEAD_SET_TAG:
c5307d9c 2576 if (arg <= 0 || arg > 16 || c->encrypt)
0f113f3e 2577 return 0;
c5307d9c 2578 memcpy(c->buf, ptr, arg);
0f113f3e
MC
2579 gctx->taglen = arg;
2580 return 1;
2581
e640fa02 2582 case EVP_CTRL_AEAD_GET_TAG:
c5307d9c 2583 if (arg <= 0 || arg > 16 || !c->encrypt
6435f0f6 2584 || gctx->taglen < 0)
0f113f3e 2585 return 0;
c5307d9c 2586 memcpy(ptr, c->buf, arg);
0f113f3e
MC
2587 return 1;
2588
2589 case EVP_CTRL_GCM_SET_IV_FIXED:
2590 /* Special case: -1 length restores whole IV */
2591 if (arg == -1) {
2592 memcpy(gctx->iv, ptr, gctx->ivlen);
2593 gctx->iv_gen = 1;
2594 return 1;
2595 }
2596 /*
2597 * Fixed field must be at least 4 bytes and invocation field at least
2598 * 8.
2599 */
2600 if ((arg < 4) || (gctx->ivlen - arg) < 8)
2601 return 0;
2602 if (arg)
2603 memcpy(gctx->iv, ptr, arg);
c5307d9c 2604 if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
16cfc2c9 2605 return 0;
0f113f3e
MC
2606 gctx->iv_gen = 1;
2607 return 1;
2608
2609 case EVP_CTRL_GCM_IV_GEN:
2610 if (gctx->iv_gen == 0 || gctx->key_set == 0)
2611 return 0;
2612 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2613 if (arg <= 0 || arg > gctx->ivlen)
2614 arg = gctx->ivlen;
2615 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
2616 /*
2617 * The invocation field will be at least 8 bytes in size, so there is no
2618 * need to check for wrap around or to increment more than the last 8 bytes.
2619 */
2620 ctr64_inc(gctx->iv + gctx->ivlen - 8);
2621 gctx->iv_set = 1;
2622 return 1;
2623
2624 case EVP_CTRL_GCM_SET_IV_INV:
c5307d9c 2625 if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
0f113f3e
MC
2626 return 0;
2627 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
2628 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2629 gctx->iv_set = 1;
2630 return 1;
2631
2632 case EVP_CTRL_AEAD_TLS1_AAD:
2633 /* Save the AAD for later use */
c8269881 2634 if (arg != EVP_AEAD_TLS1_AAD_LEN)
0f113f3e 2635 return 0;
c5307d9c 2636 memcpy(c->buf, ptr, arg);
0f113f3e 2637 gctx->tls_aad_len = arg;
d6b34570 2638 gctx->tls_enc_records = 0;
0f113f3e 2639 {
c5307d9c 2640 unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
0f113f3e 2641 /* Correct length for explicit IV */
2198b3a5
AP
2642 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
2643 return 0;
0f113f3e
MC
2644 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
2645 /* If decrypting correct for tag too */
c5307d9c 2646 if (!c->encrypt) {
2198b3a5
AP
2647 if (len < EVP_GCM_TLS_TAG_LEN)
2648 return 0;
0f113f3e 2649 len -= EVP_GCM_TLS_TAG_LEN;
2198b3a5 2650 }
c5307d9c
AP
2651 c->buf[arg - 2] = len >> 8;
2652 c->buf[arg - 1] = len & 0xff;
0f113f3e
MC
2653 }
2654 /* Extra padding: tag appended to record */
2655 return EVP_GCM_TLS_TAG_LEN;
2656
2657 case EVP_CTRL_COPY:
2658 {
2659 EVP_CIPHER_CTX *out = ptr;
6435f0f6 2660 EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out);
0f113f3e
MC
2661 if (gctx->gcm.key) {
2662 if (gctx->gcm.key != &gctx->ks)
2663 return 0;
2664 gctx_out->gcm.key = &gctx_out->ks;
2665 }
c5307d9c
AP
2666 if (gctx->iv == c->iv)
2667 gctx_out->iv = out->iv;
0f113f3e 2668 else {
cdb10bae 2669 if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
9311d0c4 2670 ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
0f113f3e 2671 return 0;
cdb10bae 2672 }
0f113f3e
MC
2673 memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
2674 }
2675 return 1;
2676 }
2677
2678 default:
2679 return -1;
2680
2681 }
2682}
bdaa5415
DSH
2683
2684static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
2685 const unsigned char *iv, int enc)
2686{
6435f0f6 2687 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
0f113f3e
MC
2688 if (!iv && !key)
2689 return 1;
2690 if (key) {
2691 do {
5158c763 2692#ifdef HWAES_CAPABLE
0f113f3e 2693 if (HWAES_CAPABLE) {
c5307d9c 2694 HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
0f113f3e
MC
2695 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2696 (block128_f) HWAES_encrypt);
5158c763 2697# ifdef HWAES_ctr32_encrypt_blocks
0f113f3e 2698 gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
5158c763 2699# else
0f113f3e 2700 gctx->ctr = NULL;
5158c763 2701# endif
0f113f3e
MC
2702 break;
2703 } else
5158c763
MC
2704#endif
2705#ifdef BSAES_CAPABLE
0f113f3e 2706 if (BSAES_CAPABLE) {
c5307d9c 2707 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
0f113f3e
MC
2708 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2709 (block128_f) AES_encrypt);
2710 gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
2711 break;
2712 } else
5158c763
MC
2713#endif
2714#ifdef VPAES_CAPABLE
0f113f3e 2715 if (VPAES_CAPABLE) {
c5307d9c 2716 vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
0f113f3e
MC
2717 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2718 (block128_f) vpaes_encrypt);
2719 gctx->ctr = NULL;
2720 break;
2721 } else
5158c763 2722#endif
0f113f3e
MC
2723 (void)0; /* terminate potentially open 'else' */
2724
c5307d9c 2725 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
0f113f3e
MC
2726 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2727 (block128_f) AES_encrypt);
5158c763 2728#ifdef AES_CTR_ASM
0f113f3e 2729 gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
5158c763 2730#else
0f113f3e 2731 gctx->ctr = NULL;
5158c763 2732#endif
0f113f3e
MC
2733 } while (0);
2734
2735 /*
2736 * If we have an IV we can set it directly, otherwise use the saved IV.
2737 */
2738 if (iv == NULL && gctx->iv_set)
2739 iv = gctx->iv;
2740 if (iv) {
2741 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
2742 gctx->iv_set = 1;
2743 }
2744 gctx->key_set = 1;
2745 } else {
2746 /* If key set use IV, otherwise copy */
2747 if (gctx->key_set)
2748 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
2749 else
2750 memcpy(gctx->iv, iv, gctx->ivlen);
2751 gctx->iv_set = 1;
2752 gctx->iv_gen = 0;
2753 }
2754 return 1;
2755}
2756
2757/*
2758 * Handle TLS GCM packet format. This consists of the last portion of the IV
28dd49fa
DSH
2759 * followed by the payload and finally the tag. On encrypt we generate the IV,
2760 * encrypt the payload and write the tag. On verify we retrieve the IV, decrypt
2761 * the payload and verify the tag.
2762 */
2763
2764static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e
MC
2765 const unsigned char *in, size_t len)
2766{
6435f0f6 2767 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
0f113f3e
MC
2768 int rv = -1;
2769 /* Encrypt/decrypt must be performed in place */
2770 if (out != in
2771 || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
2772 return -1;
df443918 2773
d6b34570
P
2774 /*
2775 * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
2776 * Requirements from SP 800-38D". The requirement is for one party to the
2777 * communication to fail after 2^64 - 1 records. We do this on the encrypting
2778 * side only.
2779 */
2780 if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
9311d0c4 2781 ERR_raise(ERR_LIB_EVP, EVP_R_TOO_MANY_RECORDS);
d6b34570
P
2782 goto err;
2783 }
2784
0f113f3e
MC
2785 /*
2786 * Set IV from start of buffer or generate IV and write to start of
2787 * buffer.
2788 */
c5307d9c
AP
2789 if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ? EVP_CTRL_GCM_IV_GEN
2790 : EVP_CTRL_GCM_SET_IV_INV,
0f113f3e
MC
2791 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
2792 goto err;
2793 /* Use saved AAD */
c5307d9c 2794 if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
0f113f3e
MC
2795 goto err;
2796 /* Fix buffer and length to point to payload */
2797 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
2798 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
2799 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
c5307d9c 2800 if (ctx->encrypt) {
0f113f3e
MC
2801 /* Encrypt payload */
2802 if (gctx->ctr) {
2803 size_t bulk = 0;
5158c763 2804#if defined(AES_GCM_ASM)
0f113f3e
MC
2805 if (len >= 32 && AES_GCM_ASM(gctx)) {
2806 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
2807 return -1;
2808
2809 bulk = AES_gcm_encrypt(in, out, len,
2810 gctx->gcm.key,
2811 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2812 gctx->gcm.len.u[1] += bulk;
2813 }
5158c763 2814#endif
0f113f3e
MC
2815 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
2816 in + bulk,
2817 out + bulk,
2818 len - bulk, gctx->ctr))
2819 goto err;
2820 } else {
2821 size_t bulk = 0;
5158c763 2822#if defined(AES_GCM_ASM2)
0f113f3e
MC
2823 if (len >= 32 && AES_GCM_ASM2(gctx)) {
2824 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
2825 return -1;
2826
2827 bulk = AES_gcm_encrypt(in, out, len,
2828 gctx->gcm.key,
2829 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2830 gctx->gcm.len.u[1] += bulk;
2831 }
5158c763 2832#endif
0f113f3e
MC
2833 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
2834 in + bulk, out + bulk, len - bulk))
2835 goto err;
2836 }
2837 out += len;
2838 /* Finally write tag */
2839 CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
2840 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
2841 } else {
2842 /* Decrypt */
2843 if (gctx->ctr) {
2844 size_t bulk = 0;
5158c763 2845#if defined(AES_GCM_ASM)
0f113f3e
MC
2846 if (len >= 16 && AES_GCM_ASM(gctx)) {
2847 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
2848 return -1;
2849
2850 bulk = AES_gcm_decrypt(in, out, len,
2851 gctx->gcm.key,
2852 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2853 gctx->gcm.len.u[1] += bulk;
2854 }
5158c763 2855#endif
0f113f3e
MC
2856 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
2857 in + bulk,
2858 out + bulk,
2859 len - bulk, gctx->ctr))
2860 goto err;
2861 } else {
2862 size_t bulk = 0;
5158c763 2863#if defined(AES_GCM_ASM2)
0f113f3e
MC
2864 if (len >= 16 && AES_GCM_ASM2(gctx)) {
2865 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
2866 return -1;
2867
2868 bulk = AES_gcm_decrypt(in, out, len,
2869 gctx->gcm.key,
2870 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2871 gctx->gcm.len.u[1] += bulk;
2872 }
5158c763 2873#endif
0f113f3e
MC
2874 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
2875 in + bulk, out + bulk, len - bulk))
2876 goto err;
2877 }
2878 /* Retrieve tag */
c5307d9c 2879 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
0f113f3e 2880 /* If tag mismatch wipe buffer */
c5307d9c 2881 if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
0f113f3e
MC
2882 OPENSSL_cleanse(out, len);
2883 goto err;
2884 }
2885 rv = len;
2886 }
2887
2888 err:
2889 gctx->iv_set = 0;
2890 gctx->tls_aad_len = -1;
2891 return rv;
2892}
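/*-
 * For reference, the record processed above is laid out as
 *
 *     explicit IV (EVP_GCM_TLS_EXPLICIT_IV_LEN = 8 bytes)
 *  || ciphertext
 *  || tag         (EVP_GCM_TLS_TAG_LEN = 16 bytes)
 *
 * so the encrypt path reports len + 8 + 16 bytes written, while the decrypt
 * path returns only the payload length once the tag has been verified.
 */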
28dd49fa 2893
f844f9eb 2894#ifdef FIPS_MODULE
bcf082d1
SL
2895/*
2896 * See SP800-38D (GCM) Section 8 "Uniqueness requirement on IVs and keys"
2897 *
2898 * See also 8.2.2 RBG-based construction.
2899 * Random construction consists of a free field (which can be NULL) and a
2900 * random field which will use a DRBG that can return at least 96 bits of
2901 * entropy strength. (The DRBG must be seeded by the FIPS module).
2902 */
2903static int aes_gcm_iv_generate(EVP_AES_GCM_CTX *gctx, int offset)
2904{
2905 int sz = gctx->ivlen - offset;
2906
2907 /* Must be at least 96 bits */
2908 if (sz <= 0 || gctx->ivlen < 12)
2909 return 0;
2910
2911 /* Use DRBG to generate random iv */
2912 if (RAND_bytes(gctx->iv + offset, sz) <= 0)
2913 return 0;
2914 return 1;
2915}
f844f9eb 2916#endif /* FIPS_MODULE */
bcf082d1 2917
17f121de 2918static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e
MC
2919 const unsigned char *in, size_t len)
2920{
6435f0f6 2921 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
bcf082d1 2922
0f113f3e
MC
2923 /* If not set up, return error */
2924 if (!gctx->key_set)
2925 return -1;
2926
2927 if (gctx->tls_aad_len >= 0)
2928 return aes_gcm_tls_cipher(ctx, out, in, len);
2929
f844f9eb 2930#ifdef FIPS_MODULE
bcf082d1
SL
2931 /*
2932 * FIPS requires generation of AES-GCM IVs inside the FIPS module.
2933 * The IV can still be set externally (the security policy will state that
2934 * this is not FIPS compliant). There are some applications
2935 * where setting the IV externally is the only option available.
2936 */
2937 if (!gctx->iv_set) {
2938 if (!ctx->encrypt || !aes_gcm_iv_generate(gctx, 0))
2939 return -1;
2940 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2941 gctx->iv_set = 1;
2942 gctx->iv_gen_rand = 1;
2943 }
2944#else
0f113f3e
MC
2945 if (!gctx->iv_set)
2946 return -1;
f844f9eb 2947#endif /* FIPS_MODULE */
bcf082d1 2948
0f113f3e
MC
2949 if (in) {
2950 if (out == NULL) {
2951 if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
2952 return -1;
c5307d9c 2953 } else if (ctx->encrypt) {
0f113f3e
MC
2954 if (gctx->ctr) {
2955 size_t bulk = 0;
5158c763 2956#if defined(AES_GCM_ASM)
0f113f3e
MC
2957 if (len >= 32 && AES_GCM_ASM(gctx)) {
2958 size_t res = (16 - gctx->gcm.mres) % 16;
2959
2960 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
2961 return -1;
2962
2963 bulk = AES_gcm_encrypt(in + res,
2964 out + res, len - res,
2965 gctx->gcm.key, gctx->gcm.Yi.c,
2966 gctx->gcm.Xi.u);
2967 gctx->gcm.len.u[1] += bulk;
2968 bulk += res;
2969 }
5158c763 2970#endif
0f113f3e
MC
2971 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
2972 in + bulk,
2973 out + bulk,
2974 len - bulk, gctx->ctr))
2975 return -1;
2976 } else {
2977 size_t bulk = 0;
5158c763 2978#if defined(AES_GCM_ASM2)
0f113f3e
MC
2979 if (len >= 32 && AES_GCM_ASM2(gctx)) {
2980 size_t res = (16 - gctx->gcm.mres) % 16;
2981
2982 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
2983 return -1;
2984
2985 bulk = AES_gcm_encrypt(in + res,
2986 out + res, len - res,
2987 gctx->gcm.key, gctx->gcm.Yi.c,
2988 gctx->gcm.Xi.u);
2989 gctx->gcm.len.u[1] += bulk;
2990 bulk += res;
2991 }
5158c763 2992#endif
0f113f3e
MC
2993 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
2994 in + bulk, out + bulk, len - bulk))
2995 return -1;
2996 }
2997 } else {
2998 if (gctx->ctr) {
2999 size_t bulk = 0;
5158c763 3000#if defined(AES_GCM_ASM)
0f113f3e
MC
3001 if (len >= 16 && AES_GCM_ASM(gctx)) {
3002 size_t res = (16 - gctx->gcm.mres) % 16;
3003
3004 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3005 return -1;
3006
3007 bulk = AES_gcm_decrypt(in + res,
3008 out + res, len - res,
3009 gctx->gcm.key,
3010 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3011 gctx->gcm.len.u[1] += bulk;
3012 bulk += res;
3013 }
5158c763 3014#endif
0f113f3e
MC
3015 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
3016 in + bulk,
3017 out + bulk,
3018 len - bulk, gctx->ctr))
3019 return -1;
3020 } else {
3021 size_t bulk = 0;
5158c763 3022#if defined(AES_GCM_ASM2)
0f113f3e
MC
3023 if (len >= 16 && AES_GCM_ASM2(gctx)) {
3024 size_t res = (16 - gctx->gcm.mres) % 16;
3025
3026 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3027 return -1;
3028
3029 bulk = AES_gcm_decrypt(in + res,
3030 out + res, len - res,
3031 gctx->gcm.key,
3032 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3033 gctx->gcm.len.u[1] += bulk;
3034 bulk += res;
3035 }
5158c763 3036#endif
0f113f3e
MC
3037 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
3038 in + bulk, out + bulk, len - bulk))
3039 return -1;
3040 }
3041 }
3042 return len;
3043 } else {
c5307d9c 3044 if (!ctx->encrypt) {
0f113f3e
MC
3045 if (gctx->taglen < 0)
3046 return -1;
c5307d9c 3047 if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
0f113f3e
MC
3048 return -1;
3049 gctx->iv_set = 0;
3050 return 0;
3051 }
c5307d9c 3052 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
0f113f3e
MC
3053 gctx->taglen = 16;
3054 /* Don't reuse the IV */
3055 gctx->iv_set = 0;
3056 return 0;
3057 }
3058
3059}
3060
5158c763 3061#define CUSTOM_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
0f113f3e
MC
3062 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3063 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
7dddf2fc 3064 | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_CUSTOM_IV_LENGTH)
0f113f3e
MC
3065
3066BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
3067 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3068 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
3069 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3070 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
3071 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
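/*-
 * A minimal usage sketch of the GCM ciphers instantiated above, assuming a
 * caller-supplied 32-byte key and 12-byte IV (the default IV length set in
 * the macro) and compressed error handling; the helper name is illustrative
 * only. AAD is passed through EVP_EncryptUpdate() with a NULL output buffer.
 */
static int example_aes_256_gcm_encrypt(const unsigned char *key,
                                       const unsigned char *iv, /* 12 bytes */
                                       const unsigned char *aad, int aadlen,
                                       const unsigned char *pt, int ptlen,
                                       unsigned char *ct, unsigned char tag[16])
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int outl = 0, tmplen = 0, ok = 0;

    if (c == NULL)
        return 0;
    if (EVP_EncryptInit_ex(c, EVP_aes_256_gcm(), NULL, key, iv)
            && EVP_EncryptUpdate(c, NULL, &outl, aad, aadlen)
            && EVP_EncryptUpdate(c, ct, &outl, pt, ptlen)
            && EVP_EncryptFinal_ex(c, ct + outl, &tmplen)
            && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag))
        ok = 1;
    EVP_CIPHER_CTX_free(c);
    return ok;
}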
32a2d8dd
DSH
3072
3073static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
0f113f3e 3074{
2c840201
P
3075 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, c);
3076
0f113f3e
MC
3077 if (type == EVP_CTRL_COPY) {
3078 EVP_CIPHER_CTX *out = ptr;
6435f0f6 3079 EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out);
2c840201 3080
0f113f3e
MC
3081 if (xctx->xts.key1) {
3082 if (xctx->xts.key1 != &xctx->ks1)
3083 return 0;
3084 xctx_out->xts.key1 = &xctx_out->ks1;
3085 }
3086 if (xctx->xts.key2) {
3087 if (xctx->xts.key2 != &xctx->ks2)
3088 return 0;
3089 xctx_out->xts.key2 = &xctx_out->ks2;
3090 }
3091 return 1;
3092 } else if (type != EVP_CTRL_INIT)
3093 return -1;
3094 /* key1 and key2 are used as an indicator both key and IV are set */
3095 xctx->xts.key1 = NULL;
3096 xctx->xts.key2 = NULL;
3097 return 1;
3098}
32a2d8dd
DSH
3099
3100static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
3101 const unsigned char *iv, int enc)
3102{
6435f0f6 3103 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
2c840201 3104
0f113f3e
MC
3105 if (!iv && !key)
3106 return 1;
3107
3538b0f7 3108 if (key) {
0f113f3e 3109 do {
3538b0f7
P
3110 /* The key is two half length keys in reality */
3111 const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
3112 const int bits = bytes * 8;
3113
3114 /*
3115 * Verify that the two keys are different.
3116 *
3117 * This addresses the vulnerability described in Rogaway's
3118 * September 2004 paper:
3119 *
3120 * "Efficient Instantiations of Tweakable Blockciphers and
3121 * Refinements to Modes OCB and PMAC".
3122 * (http://web.cs.ucdavis.edu/~rogaway/papers/offsets.pdf)
3123 *
3124 * FIPS 140-2 IG A.9 XTS-AES Key Generation Requirements states
3125 * that:
3126 * "The check for Key_1 != Key_2 shall be done at any place
3127 * BEFORE using the keys in the XTS-AES algorithm to process
3128 * data with them."
3129 */
2c840201
P
3130 if ((!allow_insecure_decrypt || enc)
3131 && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
9311d0c4 3132 ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DUPLICATED_KEYS);
3538b0f7
P
3133 return 0;
3134 }
3135
5158c763 3136#ifdef AES_XTS_ASM
0f113f3e 3137 xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
5158c763 3138#else
0f113f3e 3139 xctx->stream = NULL;
5158c763 3140#endif
0f113f3e 3141 /* key_len is two AES keys */
5158c763 3142#ifdef HWAES_CAPABLE
0f113f3e
MC
3143 if (HWAES_CAPABLE) {
3144 if (enc) {
3538b0f7 3145 HWAES_set_encrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e 3146 xctx->xts.block1 = (block128_f) HWAES_encrypt;
46f047d7
AP
3147# ifdef HWAES_xts_encrypt
3148 xctx->stream = HWAES_xts_encrypt;
3149# endif
0f113f3e 3150 } else {
3538b0f7 3151 HWAES_set_decrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e 3152 xctx->xts.block1 = (block128_f) HWAES_decrypt;
46f047d7
AP
3153# ifdef HWAES_xts_decrypt
3154 xctx->stream = HWAES_xts_decrypt;
3155# endif
0f113f3e
MC
3156 }
3157
3538b0f7 3158 HWAES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
0f113f3e
MC
3159 xctx->xts.block2 = (block128_f) HWAES_encrypt;
3160
3161 xctx->xts.key1 = &xctx->ks1;
3162 break;
3163 } else
5158c763
MC
3164#endif
3165#ifdef BSAES_CAPABLE
0f113f3e
MC
3166 if (BSAES_CAPABLE)
3167 xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
3168 else
5158c763
MC
3169#endif
3170#ifdef VPAES_CAPABLE
0f113f3e
MC
3171 if (VPAES_CAPABLE) {
3172 if (enc) {
3538b0f7 3173 vpaes_set_encrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e
MC
3174 xctx->xts.block1 = (block128_f) vpaes_encrypt;
3175 } else {
3538b0f7 3176 vpaes_set_decrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e
MC
3177 xctx->xts.block1 = (block128_f) vpaes_decrypt;
3178 }
3179
3538b0f7 3180 vpaes_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
0f113f3e
MC
3181 xctx->xts.block2 = (block128_f) vpaes_encrypt;
3182
3183 xctx->xts.key1 = &xctx->ks1;
3184 break;
3185 } else
5158c763 3186#endif
0f113f3e
MC
3187 (void)0; /* terminate potentially open 'else' */
3188
3189 if (enc) {
3538b0f7 3190 AES_set_encrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e
MC
3191 xctx->xts.block1 = (block128_f) AES_encrypt;
3192 } else {
3538b0f7 3193 AES_set_decrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e
MC
3194 xctx->xts.block1 = (block128_f) AES_decrypt;
3195 }
3196
3538b0f7 3197 AES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
0f113f3e
MC
3198 xctx->xts.block2 = (block128_f) AES_encrypt;
3199
3200 xctx->xts.key1 = &xctx->ks1;
3201 } while (0);
3538b0f7 3202 }
0f113f3e
MC
3203
3204 if (iv) {
3205 xctx->xts.key2 = &xctx->ks2;
9197c226 3206 memcpy(ctx->iv, iv, 16);
0f113f3e
MC
3207 }
3208
3209 return 1;
3210}
32a2d8dd 3211
17f121de 3212static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e
MC
3213 const unsigned char *in, size_t len)
3214{
6435f0f6 3215 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
95eda4f0
P
3216
3217 if (xctx->xts.key1 == NULL
3218 || xctx->xts.key2 == NULL
3219 || out == NULL
3220 || in == NULL
3221 || len < AES_BLOCK_SIZE)
0f113f3e 3222 return 0;
95eda4f0 3223
5516c19b 3224 /*
79c44b4e 3225 * Impose a limit of 2^20 blocks per data unit as specified by
5516c19b
P
3226 * IEEE Std 1619-2018. The earlier and obsolete IEEE Std 1619-2007
3227 * indicated that this was a SHOULD NOT rather than a MUST NOT.
3228 * NIST SP 800-38E mandates the same limit.
3229 */
3230 if (len > XTS_MAX_BLOCKS_PER_DATA_UNIT * AES_BLOCK_SIZE) {
9311d0c4 3231 ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DATA_UNIT_IS_TOO_LARGE);
5516c19b
P
3232 return 0;
3233 }
3234
0f113f3e
MC
3235 if (xctx->stream)
3236 (*xctx->stream) (in, out, len,
6435f0f6 3237 xctx->xts.key1, xctx->xts.key2,
9197c226
BK
3238 ctx->iv);
3239 else if (CRYPTO_xts128_encrypt(&xctx->xts, ctx->iv, in, out, len,
6435f0f6 3240 EVP_CIPHER_CTX_encrypting(ctx)))
0f113f3e
MC
3241 return 0;
3242 return 1;
3243}
3244
5158c763 3245#define aes_xts_cleanup NULL
0f113f3e 3246
5158c763 3247#define XTS_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
0f113f3e
MC
3248 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3249 | EVP_CIPH_CUSTOM_COPY)
3250
3251BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
3252 BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
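/*-
 * A minimal usage sketch of the XTS ciphers instantiated above. The key
 * passed to the EVP layer is the concatenation of the two half keys (64
 * bytes for AES-256-XTS) and, per the check in aes_xts_init_key(), the two
 * halves must differ; the 16-byte "IV" carries the sector tweak. The helper
 * name is illustrative only.
 */
static int example_aes_256_xts_encrypt(const unsigned char key[64],
                                       const unsigned char tweak[16],
                                       const unsigned char *in, int len,
                                       unsigned char *out)
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int outl = 0, ok = 0;

    if (c == NULL)
        return 0;
    ok = EVP_EncryptInit_ex(c, EVP_aes_256_xts(), NULL, key, tweak)
         && EVP_EncryptUpdate(c, out, &outl, in, len);
    EVP_CIPHER_CTX_free(c);
    return ok;
}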
23916810
DSH
3253
3254static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
0f113f3e 3255{
6435f0f6 3256 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c);
0f113f3e
MC
3257 switch (type) {
3258 case EVP_CTRL_INIT:
3259 cctx->key_set = 0;
3260 cctx->iv_set = 0;
3261 cctx->L = 8;
3262 cctx->M = 12;
3263 cctx->tag_set = 0;
3264 cctx->len_set = 0;
e75c5a79
DSH
3265 cctx->tls_aad_len = -1;
3266 return 1;
3267
7dddf2fc
SL
3268 case EVP_CTRL_GET_IVLEN:
3269 *(int *)ptr = 15 - cctx->L;
3270 return 1;
3271
e75c5a79
DSH
3272 case EVP_CTRL_AEAD_TLS1_AAD:
3273 /* Save the AAD for later use */
3274 if (arg != EVP_AEAD_TLS1_AAD_LEN)
3275 return 0;
6435f0f6 3276 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
e75c5a79
DSH
3277 cctx->tls_aad_len = arg;
3278 {
6435f0f6
RL
3279 uint16_t len =
3280 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
3281 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
e75c5a79 3282 /* Correct length for explicit IV */
2198b3a5
AP
3283 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
3284 return 0;
e75c5a79
DSH
3285 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
3286 /* If decrypting correct for tag too */
2198b3a5
AP
3287 if (!EVP_CIPHER_CTX_encrypting(c)) {
3288 if (len < cctx->M)
3289 return 0;
e75c5a79 3290 len -= cctx->M;
2198b3a5 3291 }
6435f0f6
RL
3292 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
3293 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
e75c5a79
DSH
3294 }
3295 /* Extra padding: tag appended to record */
3296 return cctx->M;
3297
3298 case EVP_CTRL_CCM_SET_IV_FIXED:
3299 /* Sanity check length */
3300 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
3301 return 0;
3302 /* Just copy to first part of IV */
9197c226 3303 memcpy(c->iv, ptr, arg);
0f113f3e
MC
3304 return 1;
3305
e640fa02 3306 case EVP_CTRL_AEAD_SET_IVLEN:
0f113f3e 3307 arg = 15 - arg;
018fcbec 3308 /* fall thru */
0f113f3e
MC
3309 case EVP_CTRL_CCM_SET_L:
3310 if (arg < 2 || arg > 8)
3311 return 0;
3312 cctx->L = arg;
3313 return 1;
3314
e640fa02 3315 case EVP_CTRL_AEAD_SET_TAG:
0f113f3e
MC
3316 if ((arg & 1) || arg < 4 || arg > 16)
3317 return 0;
6435f0f6 3318 if (EVP_CIPHER_CTX_encrypting(c) && ptr)
0f113f3e
MC
3319 return 0;
3320 if (ptr) {
3321 cctx->tag_set = 1;
6435f0f6 3322 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
0f113f3e
MC
3323 }
3324 cctx->M = arg;
3325 return 1;
3326
e640fa02 3327 case EVP_CTRL_AEAD_GET_TAG:
6435f0f6 3328 if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
0f113f3e
MC
3329 return 0;
3330 if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
3331 return 0;
3332 cctx->tag_set = 0;
3333 cctx->iv_set = 0;
3334 cctx->len_set = 0;
3335 return 1;
3336
3337 case EVP_CTRL_COPY:
3338 {
3339 EVP_CIPHER_CTX *out = ptr;
6435f0f6 3340 EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out);
0f113f3e
MC
3341 if (cctx->ccm.key) {
3342 if (cctx->ccm.key != &cctx->ks)
3343 return 0;
3344 cctx_out->ccm.key = &cctx_out->ks;
3345 }
3346 return 1;
3347 }
3348
3349 default:
3350 return -1;
3351
3352 }
3353}
23916810
DSH
3354
3355static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
3356 const unsigned char *iv, int enc)
3357{
6435f0f6 3358 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
0f113f3e
MC
3359 if (!iv && !key)
3360 return 1;
3361 if (key)
3362 do {
5158c763 3363#ifdef HWAES_CAPABLE
0f113f3e 3364 if (HWAES_CAPABLE) {
6435f0f6
RL
3365 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3366 &cctx->ks.ks);
0f113f3e
MC
3367
3368 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3369 &cctx->ks, (block128_f) HWAES_encrypt);
3370 cctx->str = NULL;
3371 cctx->key_set = 1;
3372 break;
3373 } else
5158c763
MC
3374#endif
3375#ifdef VPAES_CAPABLE
0f113f3e 3376 if (VPAES_CAPABLE) {
6435f0f6
RL
3377 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3378 &cctx->ks.ks);
0f113f3e
MC
3379 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3380 &cctx->ks, (block128_f) vpaes_encrypt);
3381 cctx->str = NULL;
3382 cctx->key_set = 1;
3383 break;
3384 }
5158c763 3385#endif
6435f0f6
RL
3386 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3387 &cctx->ks.ks);
0f113f3e
MC
3388 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3389 &cctx->ks, (block128_f) AES_encrypt);
3390 cctx->str = NULL;
3391 cctx->key_set = 1;
3392 } while (0);
3393 if (iv) {
9197c226 3394 memcpy(ctx->iv, iv, 15 - cctx->L);
0f113f3e
MC
3395 cctx->iv_set = 1;
3396 }
3397 return 1;
3398}
23916810 3399
e75c5a79
DSH
3400static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3401 const unsigned char *in, size_t len)
3402{
6435f0f6 3403 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
e75c5a79
DSH
3404 CCM128_CONTEXT *ccm = &cctx->ccm;
3405 /* Encrypt/decrypt must be performed in place */
3406 if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
3407 return -1;
3408 /* If encrypting set explicit IV from sequence number (start of AAD) */
6435f0f6
RL
3409 if (EVP_CIPHER_CTX_encrypting(ctx))
3410 memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
3411 EVP_CCM_TLS_EXPLICIT_IV_LEN);
e75c5a79 3412 /* Get rest of IV from explicit IV */
9197c226 3413 memcpy(ctx->iv + EVP_CCM_TLS_FIXED_IV_LEN, in,
6435f0f6 3414 EVP_CCM_TLS_EXPLICIT_IV_LEN);
e75c5a79
DSH
3415 /* Correct length value */
3416 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
9197c226 3417 if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L,
6435f0f6 3418 len))
e75c5a79
DSH
3419 return -1;
3420 /* Use saved AAD */
6435f0f6 3421 CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
e75c5a79
DSH
3422 /* Fix buffer to point to payload */
3423 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3424 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
6435f0f6 3425 if (EVP_CIPHER_CTX_encrypting(ctx)) {
e75c5a79
DSH
3426 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3427 cctx->str) :
3428 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3429 return -1;
3430 if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
3431 return -1;
3432 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3433 } else {
3434 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3435 cctx->str) :
3436 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3437 unsigned char tag[16];
3438 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3439 if (!CRYPTO_memcmp(tag, in + len, cctx->M))
3440 return len;
3441 }
3442 }
3443 OPENSSL_cleanse(out, len);
3444 return -1;
3445 }
3446}
3447
17f121de 3448static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e
MC
3449 const unsigned char *in, size_t len)
3450{
6435f0f6 3451 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
0f113f3e
MC
3452 CCM128_CONTEXT *ccm = &cctx->ccm;
3453 /* If not set up, return error */
e75c5a79
DSH
3454 if (!cctx->key_set)
3455 return -1;
3456
3457 if (cctx->tls_aad_len >= 0)
3458 return aes_ccm_tls_cipher(ctx, out, in, len);
3459
197421b1
DSH
3460 /* EVP_*Final() doesn't return any data */
3461 if (in == NULL && out != NULL)
3462 return 0;
3463
e75c5a79 3464 if (!cctx->iv_set)
0f113f3e 3465 return -1;
e75c5a79 3466
0f113f3e
MC
3467 if (!out) {
3468 if (!in) {
9197c226 3469 if (CRYPTO_ccm128_setiv(ccm, ctx->iv,
6435f0f6 3470 15 - cctx->L, len))
0f113f3e
MC
3471 return -1;
3472 cctx->len_set = 1;
3473 return len;
3474 }
3475 /* If have AAD need message length */
3476 if (!cctx->len_set && len)
3477 return -1;
3478 CRYPTO_ccm128_aad(ccm, in, len);
3479 return len;
3480 }
67c81ec3
TN
3481
3482 /* The tag must be set before actually decrypting data */
3483 if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
3484 return -1;
3485
0f113f3e
MC
3486 /* If not set length yet do it */
3487 if (!cctx->len_set) {
9197c226 3488 if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
0f113f3e
MC
3489 return -1;
3490 cctx->len_set = 1;
3491 }
6435f0f6 3492 if (EVP_CIPHER_CTX_encrypting(ctx)) {
0f113f3e
MC
3493 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3494 cctx->str) :
3495 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3496 return -1;
3497 cctx->tag_set = 1;
3498 return len;
3499 } else {
3500 int rv = -1;
3501 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3502 cctx->str) :
3503 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3504 unsigned char tag[16];
3505 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
6435f0f6
RL
3506 if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
3507 cctx->M))
0f113f3e
MC
3508 rv = len;
3509 }
3510 }
3511 if (rv == -1)
3512 OPENSSL_cleanse(out, len);
3513 cctx->iv_set = 0;
3514 cctx->tag_set = 0;
3515 cctx->len_set = 0;
3516 return rv;
3517 }
0f113f3e
MC
3518}
3519
5158c763 3520#define aes_ccm_cleanup NULL
0f113f3e 3521
e75c5a79
DSH
3522BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
3523 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
f6c95e46
RS
3524BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
3525 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3526BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
3527 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
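/*-
 * A minimal usage sketch of the CCM ciphers instantiated above, with
 * compressed error handling and an illustrative helper name. As the cipher
 * code requires, the total plaintext length is passed in a length-only
 * EVP_EncryptUpdate() call (NULL in/out) before any AAD or data; the nonce
 * length (here 13, i.e. L = 2) and the tag length are set via the AEAD ctrls.
 */
static int example_aes_128_ccm_encrypt(const unsigned char *key,
                                       const unsigned char nonce[13],
                                       const unsigned char *aad, int aadlen,
                                       const unsigned char *pt, int ptlen,
                                       unsigned char *ct, unsigned char tag[12])
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int outl = 0, tmplen = 0, ok = 0;

    if (c == NULL)
        return 0;
    if (EVP_EncryptInit_ex(c, EVP_aes_128_ccm(), NULL, NULL, NULL)
            && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 13, NULL)
            && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_TAG, 12, NULL)
            && EVP_EncryptInit_ex(c, NULL, NULL, key, nonce)
            && EVP_EncryptUpdate(c, NULL, &outl, NULL, ptlen) /* total length */
            && EVP_EncryptUpdate(c, NULL, &outl, aad, aadlen) /* AAD */
            && EVP_EncryptUpdate(c, ct, &outl, pt, ptlen)
            && EVP_EncryptFinal_ex(c, ct + outl, &tmplen)
            && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 12, tag))
        ok = 1;
    EVP_CIPHER_CTX_free(c);
    return ok;
}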
0f113f3e
MC
3528
3529typedef struct {
3530 union {
39147079 3531 OSSL_UNION_ALIGN;
0f113f3e
MC
3532 AES_KEY ks;
3533 } ks;
3534 /* Indicates if IV has been set */
3535 unsigned char *iv;
3536} EVP_AES_WRAP_CTX;
97cf1f6c
DSH
3537
3538static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
3539 const unsigned char *iv, int enc)
3540{
6435f0f6 3541 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
0f113f3e
MC
3542 if (!iv && !key)
3543 return 1;
3544 if (key) {
6435f0f6
RL
3545 if (EVP_CIPHER_CTX_encrypting(ctx))
3546 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3547 &wctx->ks.ks);
0f113f3e 3548 else
6435f0f6
RL
3549 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3550 &wctx->ks.ks);
0f113f3e
MC
3551 if (!iv)
3552 wctx->iv = NULL;
3553 }
3554 if (iv) {
9197c226
BK
3555 memcpy(ctx->iv, iv, EVP_CIPHER_CTX_iv_length(ctx));
3556 wctx->iv = ctx->iv;
0f113f3e
MC
3557 }
3558 return 1;
3559}

static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t inlen)
{
    EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
    size_t rv;
    /* AES wrap with padding has an IV length of 4, without padding 8 */
    int pad = EVP_CIPHER_CTX_iv_length(ctx) == 4;
    /* There is no final operation, so always return zero length */
    if (!in)
        return 0;
    /* Input length must always be non-zero */
    if (!inlen)
        return -1;
    /* If decrypting, we need at least 16 bytes and a multiple of 8 */
    if (!EVP_CIPHER_CTX_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
        return -1;
    /* If not padding, the input must be a multiple of 8 */
    if (!pad && inlen & 0x7)
        return -1;
    if (ossl_is_partially_overlapping(out, in, inlen)) {
        ERR_raise(ERR_LIB_EVP, EVP_R_PARTIALLY_OVERLAPPING);
        return 0;
    }
    if (!out) {
        if (EVP_CIPHER_CTX_encrypting(ctx)) {
            /* If padding, round up to a multiple of 8 */
            if (pad)
                inlen = (inlen + 7) / 8 * 8;
            /* 8-byte prefix */
            return inlen + 8;
        } else {
            /*
             * If not padding, the output will be exactly 8 bytes smaller
             * than the input. If padding, it will be at least 8 bytes
             * smaller, but we don't know by how much.
             */
            return inlen - 8;
        }
    }
    if (pad) {
        if (EVP_CIPHER_CTX_encrypting(ctx))
            rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
                                     out, in, inlen,
                                     (block128_f) AES_encrypt);
        else
            rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
                                       out, in, inlen,
                                       (block128_f) AES_decrypt);
    } else {
        if (EVP_CIPHER_CTX_encrypting(ctx))
            rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
                                 out, in, inlen, (block128_f) AES_encrypt);
        else
            rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
                                   out, in, inlen, (block128_f) AES_decrypt);
    }
    return rv ? (int)rv : -1;
}

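/*
 * Illustrative sketch (not part of this file): wrapping a 16-byte key with
 * AES-256 key wrap (RFC 3394) through the EVP interface. Wrap mode is only
 * usable once EVP_CIPHER_CTX_FLAG_WRAP_ALLOW has been set on the context.
 * The names kek, plain_key and wrapped are hypothetical and error checks
 * are omitted.
 *
 *     EVP_CIPHER_CTX *wc = EVP_CIPHER_CTX_new();
 *     int outl = 0;
 *     EVP_CIPHER_CTX_set_flags(wc, EVP_CIPHER_CTX_FLAG_WRAP_ALLOW);
 *     EVP_EncryptInit_ex(wc, EVP_aes_256_wrap(), NULL, kek, NULL);
 *     EVP_EncryptUpdate(wc, wrapped, &outl, plain_key, 16);
 *     EVP_CIPHER_CTX_free(wc);
 *
 * As the length logic above implies, outl is expected to be 24 here: the
 * 16-byte input plus the 8-byte integrity-check prefix.
 */
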
#define WRAP_FLAGS      (EVP_CIPH_WRAP_MODE \
                | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)

static const EVP_CIPHER aes_128_wrap = {
    NID_id_aes128_wrap,
    8, 16, 8, WRAP_FLAGS, EVP_ORIG_GLOBAL,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_128_wrap(void)
{
    return &aes_128_wrap;
}

static const EVP_CIPHER aes_192_wrap = {
    NID_id_aes192_wrap,
    8, 24, 8, WRAP_FLAGS, EVP_ORIG_GLOBAL,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_192_wrap(void)
{
    return &aes_192_wrap;
}

static const EVP_CIPHER aes_256_wrap = {
    NID_id_aes256_wrap,
    8, 32, 8, WRAP_FLAGS, EVP_ORIG_GLOBAL,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_256_wrap(void)
{
    return &aes_256_wrap;
}

static const EVP_CIPHER aes_128_wrap_pad = {
    NID_id_aes128_wrap_pad,
    8, 16, 4, WRAP_FLAGS, EVP_ORIG_GLOBAL,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
{
    return &aes_128_wrap_pad;
}

static const EVP_CIPHER aes_192_wrap_pad = {
    NID_id_aes192_wrap_pad,
    8, 24, 4, WRAP_FLAGS, EVP_ORIG_GLOBAL,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
{
    return &aes_192_wrap_pad;
}

static const EVP_CIPHER aes_256_wrap_pad = {
    NID_id_aes256_wrap_pad,
    8, 32, 4, WRAP_FLAGS, EVP_ORIG_GLOBAL,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
{
    return &aes_256_wrap_pad;
}

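/*
 * Sizing note for the padded variants above, derived from aes_wrap_cipher():
 * an IV length of 4 selects RFC 5649 padded mode, so the input may be any
 * non-zero length. On encryption the input length is first rounded up to a
 * multiple of 8 and an 8-byte prefix is added; wrapping, say, 20 bytes
 * therefore produces 24 + 8 = 32 bytes of output. When queried with a NULL
 * output buffer the cipher reports this rounded-up size.
 */
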
#ifndef OPENSSL_NO_OCB
static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
    EVP_CIPHER_CTX *newc;
    EVP_AES_OCB_CTX *new_octx;

    switch (type) {
    case EVP_CTRL_INIT:
        octx->key_set = 0;
        octx->iv_set = 0;
        octx->ivlen = EVP_CIPHER_iv_length(c->cipher);
        octx->iv = c->iv;
        octx->taglen = 16;
        octx->data_buf_len = 0;
        octx->aad_buf_len = 0;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = octx->ivlen;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        /* IV len must be 1 to 15 */
        if (arg <= 0 || arg > 15)
            return 0;

        octx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if (ptr == NULL) {
            /* Tag len must be 0 to 16 */
            if (arg < 0 || arg > 16)
                return 0;

            octx->taglen = arg;
            return 1;
        }
        if (arg != octx->taglen || EVP_CIPHER_CTX_encrypting(c))
            return 0;
        memcpy(octx->tag, ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (arg != octx->taglen || !EVP_CIPHER_CTX_encrypting(c))
            return 0;

        memcpy(ptr, octx->tag, arg);
        return 1;

    case EVP_CTRL_COPY:
        newc = (EVP_CIPHER_CTX *)ptr;
        new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc);
        return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
                                      &new_octx->ksenc.ks,
                                      &new_octx->ksdec.ks);

    default:
        return -1;

    }
}

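/*
 * Illustrative sketch (not part of this file): one-shot encryption with
 * AES-128-OCB through the EVP interface, using the ctrls handled above.
 * A 12-byte nonce and the default 16-byte tag are assumed; buffer names
 * such as key, nonce, aad, msg, ct and tag are hypothetical and error
 * checks are omitted.
 *
 *     EVP_CIPHER_CTX *oc = EVP_CIPHER_CTX_new();
 *     int outl = 0, tmplen = 0;
 *     EVP_EncryptInit_ex(oc, EVP_aes_128_ocb(), NULL, NULL, NULL);
 *     EVP_CIPHER_CTX_ctrl(oc, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL);
 *     EVP_EncryptInit_ex(oc, NULL, NULL, key, nonce);
 *     EVP_EncryptUpdate(oc, NULL, &outl, aad, (int)aadlen);
 *     EVP_EncryptUpdate(oc, ct, &outl, msg, (int)msglen);
 *     EVP_EncryptFinal_ex(oc, ct + outl, &tmplen);
 *     EVP_CIPHER_CTX_ctrl(oc, EVP_CTRL_AEAD_GET_TAG, 16, tag);
 *     EVP_CIPHER_CTX_free(oc);
 */
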
static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
# ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksenc.ks);
                HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) HWAES_encrypt,
                                        (block128_f) HWAES_decrypt,
                                        enc ? HWAES_ocb_encrypt
                                            : HWAES_ocb_decrypt))
                    return 0;
                break;
            }
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksenc.ks);
                vpaes_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) vpaes_encrypt,
                                        (block128_f) vpaes_decrypt,
                                        NULL))
                    return 0;
                break;
            }
# endif
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &octx->ksenc.ks);
            AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) AES_encrypt,
                                    (block128_f) AES_decrypt,
                                    NULL))
                return 0;
        }
        while (0);

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If the key is set, use the IV now; otherwise save a copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}

static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    unsigned char *buf;
    int *buf_len;
    int written_len = 0;
    size_t trailing_len;
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

    /* If the IV or key is not set then return an error */
    if (!octx->iv_set)
        return -1;

    if (!octx->key_set)
        return -1;

    if (in != NULL) {
        /*
         * Need to ensure we are only passing full blocks to the low level
         * OCB routines. We do it here rather than in EVP_EncryptUpdate/
         * EVP_DecryptUpdate because we need to pass full blocks of AAD too
         * and those routines don't support that.
         */

        /* Are we dealing with AAD or normal data here? */
        if (out == NULL) {
            buf = octx->aad_buf;
            buf_len = &(octx->aad_buf_len);
        } else {
            buf = octx->data_buf;
            buf_len = &(octx->data_buf_len);

            if (ossl_is_partially_overlapping(out + *buf_len, in, len)) {
                ERR_raise(ERR_LIB_EVP, EVP_R_PARTIALLY_OVERLAPPING);
                return 0;
            }
        }

        /*
         * If we've got a partially filled buffer from a previous call then
         * use that data first
         */
        if (*buf_len > 0) {
            unsigned int remaining;

            remaining = AES_BLOCK_SIZE - (*buf_len);
            if (remaining > len) {
                memcpy(buf + (*buf_len), in, len);
                *(buf_len) += len;
                return 0;
            }
            memcpy(buf + (*buf_len), in, remaining);

            /*
             * If we get here we've filled the buffer, so process it
             */
            len -= remaining;
            in += remaining;
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
                    return -1;
            } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            }
            written_len = AES_BLOCK_SIZE;
            *buf_len = 0;
            if (out != NULL)
                out += AES_BLOCK_SIZE;
        }

        /* Do we have a partial block to handle at the end? */
        trailing_len = len % AES_BLOCK_SIZE;

        /*
         * If we've got some full blocks to handle, then process these first
         */
        if (len != trailing_len) {
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
                    return -1;
            } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            }
            written_len += len - trailing_len;
            in += len - trailing_len;
        }

        /* Handle any trailing partial block */
        if (trailing_len > 0) {
            memcpy(buf, in, trailing_len);
            *buf_len = trailing_len;
        }

        return written_len;
    } else {
        /*
         * First of all, empty the buffer of any partial block that we might
         * have been provided - both for data and AAD
         */
        if (octx->data_buf_len > 0) {
            if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            }
            written_len = octx->data_buf_len;
            octx->data_buf_len = 0;
        }
        if (octx->aad_buf_len > 0) {
            if (!CRYPTO_ocb128_aad
                (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
                return -1;
            octx->aad_buf_len = 0;
        }
        /* If decrypting then verify the tag */
        if (!EVP_CIPHER_CTX_encrypting(ctx)) {
            if (octx->taglen < 0)
                return -1;
            if (CRYPTO_ocb128_finish(&octx->ocb,
                                     octx->tag, octx->taglen) != 0)
                return -1;
            octx->iv_set = 0;
            return written_len;
        }
        /* If encrypting then just get the tag */
        if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
            return -1;
        /* Don't reuse the IV */
        octx->iv_set = 0;
        return written_len;
    }
}
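
/*
 * Because the routine above buffers partial blocks internally, callers may
 * feed data and AAD to EVP_EncryptUpdate()/EVP_DecryptUpdate() in chunks of
 * any size. A decryption sketch (illustrative only, hypothetical buffer
 * names, no error checks): the expected tag is supplied before the final
 * call, which is where CRYPTO_ocb128_finish() verifies it.
 *
 *     EVP_CIPHER_CTX *oc = EVP_CIPHER_CTX_new();
 *     int outl = 0, tmplen = 0, rv;
 *     EVP_DecryptInit_ex(oc, EVP_aes_128_ocb(), NULL, key, nonce);
 *     EVP_DecryptUpdate(oc, NULL, &outl, aad, (int)aadlen);
 *     EVP_DecryptUpdate(oc, pt, &outl, ct, (int)ctlen);
 *     EVP_CIPHER_CTX_ctrl(oc, EVP_CTRL_AEAD_SET_TAG, 16, tag);
 *     rv = EVP_DecryptFinal_ex(oc, pt + outl, &tmplen);
 *     EVP_CIPHER_CTX_free(oc);
 *
 * rv is 0 when the tag does not verify.
 */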

static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
    CRYPTO_ocb128_cleanup(&octx->ocb);
    return 1;
}

BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
#endif                          /* OPENSSL_NO_OCB */