/*
 * Copyright 2001-2022 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the Apache License 2.0 (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

/*
 * This file uses the low level AES functions (which are deprecated for
 * non-internal use) in order to implement the EVP AES ciphers.
 */
#include "internal/deprecated.h"

#include <string.h>
#include <assert.h>
#include <openssl/opensslconf.h>
#include <openssl/crypto.h>
#include <openssl/evp.h>
#include <openssl/err.h>
#include <openssl/aes.h>
#include <openssl/rand.h>
#include <openssl/cmac.h>
#include "crypto/evp.h"
#include "internal/cryptlib.h"
#include "crypto/modes.h"
#include "crypto/siv.h"
#include "crypto/aes_platform.h"
#include "evp_local.h"

typedef struct {
    union {
        OSSL_UNION_ALIGN;
        AES_KEY ks;
    } ks;
    block128_f block;
    union {
        cbc128_f cbc;
        ctr128_f ctr;
    } stream;
} EVP_AES_KEY;
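
/*
 * EVP_AES_KEY pairs the mandatory block routine with an optional accelerated
 * "stream" routine: the init_key() implementations below fill in stream.cbc
 * or stream.ctr only when a platform-specific bulk routine is available and
 * leave it NULL otherwise, so the mode code falls back to the block function.
 */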

typedef struct {
    union {
        OSSL_UNION_ALIGN;
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    GCM128_CONTEXT gcm;
    unsigned char *iv;          /* Temporary IV store */
    int ivlen;                  /* IV length */
    int taglen;
    int iv_gen;                 /* It is OK to generate IVs */
    int iv_gen_rand;            /* No IV was specified, so generate a rand IV */
    int tls_aad_len;            /* TLS AAD length */
    uint64_t tls_enc_records;   /* Number of TLS records encrypted */
    ctr128_f ctr;
} EVP_AES_GCM_CTX;

typedef struct {
    union {
        OSSL_UNION_ALIGN;
        AES_KEY ks;
    } ks1, ks2;                 /* AES key schedules to use */
    XTS128_CONTEXT xts;
    void (*stream) (const unsigned char *in,
                    unsigned char *out, size_t length,
                    const AES_KEY *key1, const AES_KEY *key2,
                    const unsigned char iv[16]);
} EVP_AES_XTS_CTX;

#ifdef FIPS_MODULE
static const int allow_insecure_decrypt = 0;
#else
static const int allow_insecure_decrypt = 1;
#endif

typedef struct {
    union {
        OSSL_UNION_ALIGN;
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    int tag_set;                /* Set if tag is valid */
    int len_set;                /* Set if message length set */
    int L, M;                   /* L and M parameters from RFC3610 */
    int tls_aad_len;            /* TLS AAD length */
    CCM128_CONTEXT ccm;
    ccm128_f str;
} EVP_AES_CCM_CTX;

#ifndef OPENSSL_NO_OCB
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        AES_KEY ks;
    } ksenc;                    /* AES key schedule to use for encryption */
    union {
        OSSL_UNION_ALIGN;
        AES_KEY ks;
    } ksdec;                    /* AES key schedule to use for decryption */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    OCB128_CONTEXT ocb;
    unsigned char *iv;          /* Temporary IV store */
    unsigned char tag[16];
    unsigned char data_buf[16]; /* Store partial data blocks */
    unsigned char aad_buf[16];  /* Store partial AAD blocks */
    int data_buf_len;
    int aad_buf_len;
    int ivlen;                  /* IV length */
    int taglen;
} EVP_AES_OCB_CTX;
#endif

#define MAXBITCHUNK     ((size_t)1<<(sizeof(size_t)*8-4))
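
/*
 * MAXBITCHUNK is used by the bit-wise CFB1 cipher path to bound how much
 * input is handled per call, leaving enough headroom for a byte count to be
 * scaled to a bit count without overflowing size_t.
 */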

/* increment counter (64-bit int) by 1 */
static void ctr64_inc(unsigned char *counter)
{
    int n = 8;
    unsigned char c;

    do {
        --n;
        c = counter[n];
        ++c;
        counter[n] = c;
        if (c)
            return;
    } while (n);
}
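
/*
 * ctr64_inc() is used by the GCM EVP_CTRL_GCM_IV_GEN handling to step the
 * 8-byte big-endian invocation field at the end of the IV between records.
 */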

#if defined(AESNI_CAPABLE)
# if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
#  define AES_GCM_ASM2(gctx)    (gctx->gcm.block==(block128_f)aesni_encrypt && \
                                 gctx->gcm.ghash==gcm_ghash_avx)
#  undef AES_GCM_ASM2           /* minor size optimization */
# endif

static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                          const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
    const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;

    if (keylen <= 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
        return 0;
    }
    mode = EVP_CIPHER_CTX_get_mode(ctx);
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        ret = aesni_set_decrypt_key(key, keylen, &dat->ks.ks);
        dat->block = (block128_f) aesni_decrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) aesni_cbc_encrypt : NULL;
    } else {
        ret = aesni_set_encrypt_key(key, keylen, &dat->ks.ks);
        dat->block = (block128_f) aesni_encrypt;
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
        else if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        else
            dat->stream.cbc = NULL;
    }

    if (ret < 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}
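
/*
 * For orientation, a rough sketch of how these callbacks are reached: they
 * are never called directly, but through the generic EVP interface, e.g.
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     EVP_EncryptInit_ex(c, EVP_aes_128_cbc(), NULL, key, iv);
 *     EVP_EncryptUpdate(c, out, &outl, in, inl);
 *     EVP_EncryptFinal_ex(c, out + outl, &tmplen);
 *     EVP_CIPHER_CTX_free(c);
 *
 * EVP_aes_128_cbc() (defined via BLOCK_CIPHER_generic further down) returns
 * the aesni_* cipher table when AESNI_CAPABLE is true, so the init call above
 * ends up in aesni_init_key() on such machines.
 */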

static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
                      ctx->iv, EVP_CIPHER_CTX_is_encrypting(ctx));

    return 1;
}

static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    size_t bl = EVP_CIPHER_CTX_get_block_size(ctx);

    if (len < bl)
        return 1;

    aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
                      EVP_CIPHER_CTX_is_encrypting(ctx));

    return 1;
}

# define aesni_ofb_cipher aes_ofb_cipher
static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb_cipher aes_cfb_cipher
static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb8_cipher aes_cfb8_cipher
static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_cfb1_cipher aes_cfb1_cipher
static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_ctr_cipher aes_ctr_cipher
static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX, ctx);

    if (iv == NULL && key == NULL)
        return 1;

    if (key) {
        const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;

        if (keylen <= 0) {
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
            return 0;
        }
        aesni_set_encrypt_key(key, keylen, &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
        gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

# define aesni_gcm_cipher aes_gcm_cipher
static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);

    if (iv == NULL && key == NULL)
        return 1;

    if (key) {
        /* The key is two half length keys in reality */
        const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
        const int bytes = keylen / 2;
        const int bits = bytes * 8;

        if (keylen <= 0) {
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
            return 0;
        }
        /*
         * Verify that the two keys are different.
         *
         * This addresses Rogaway's vulnerability.
         * See comment in aes_xts_init_key() below.
         */
        if ((!allow_insecure_decrypt || enc)
                && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
            ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DUPLICATED_KEYS);
            return 0;
        }

        /* key_len is two AES keys */
        if (enc) {
            aesni_set_encrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_encrypt;
            xctx->stream = aesni_xts_encrypt;
        } else {
            aesni_set_decrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_decrypt;
            xctx->stream = aesni_xts_decrypt;
        }

        aesni_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aesni_encrypt;

        xctx->xts.key1 = &xctx->ks1;
    }

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(ctx->iv, iv, 16);
    }

    return 1;
}
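
/*
 * Note on the XTS key layout checked above: the EVP-level key is the
 * concatenation of two AES keys of equal size (so EVP_aes_128_xts() takes a
 * 256-bit key). The CRYPTO_memcmp() check rejects keys whose two halves are
 * identical, the degenerate choice Rogaway's analysis warns about; decrypting
 * existing data with such a key is still allowed outside the FIPS module via
 * allow_insecure_decrypt.
 */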

# define aesni_xts_cipher aes_xts_cipher
static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);

    if (iv == NULL && key == NULL)
        return 1;

    if (key != NULL) {
        const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;

        if (keylen <= 0) {
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
            return 0;
        }
        aesni_set_encrypt_key(key, keylen, &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aesni_encrypt);
        cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
            (ccm128_f) aesni_ccm64_decrypt_blocks;
        cctx->key_set = 1;
    }
    if (iv) {
        memcpy(ctx->iv, iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}
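
/*
 * CCM detail: per RFC 3610 the nonce is 15 - L bytes long, which is why only
 * 15 - cctx->L bytes of the supplied iv are copied above; L is the width in
 * bytes of the message-length field and M the tag length (see the
 * EVP_AES_CCM_CTX definition).
 */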

# define aesni_ccm_cipher aes_ccm_cipher
static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# ifndef OPENSSL_NO_OCB
static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

    if (iv == NULL && key == NULL)
        return 1;

    if (key != NULL) {
        const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;

        if (keylen <= 0) {
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
            return 0;
        }
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
            aesni_set_encrypt_key(key, keylen, &octx->ksenc.ks);
            aesni_set_decrypt_key(key, keylen, &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) aesni_encrypt,
                                    (block128_f) aesni_decrypt,
                                    enc ? aesni_ocb_encrypt
                                        : aesni_ocb_decrypt))
                return 0;
        }
        while (0);

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}

# define aesni_ocb_cipher aes_ocb_cipher
static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
# endif /* OPENSSL_NO_OCB */

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        EVP_ORIG_GLOBAL, \
        aesni_init_key, \
        aesni_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize, \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        EVP_ORIG_GLOBAL, \
        aes_init_key, \
        aes_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        EVP_ORIG_GLOBAL, \
        aesni_##mode##_init_key, \
        aesni_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        EVP_ORIG_GLOBAL, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
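
/*
 * For orientation: an invocation along the lines of
 *
 *     BLOCK_CIPHER_generic(NID_aes, 128, 16, 16, cbc, cbc, CBC, flags)
 *
 * (the real invocations appear later in this file) expands to two EVP_CIPHER
 * tables, aesni_128_cbc and aes_128_cbc, plus the public accessor
 * EVP_aes_128_cbc(), which selects the AES-NI table at run time whenever
 * AESNI_CAPABLE evaluates to true.
 */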

#elif defined(SPARC_AES_CAPABLE)

static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                           const unsigned char *iv, int enc)
{
    int ret, mode, bits;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_get_mode(ctx);
    bits = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
    if (bits <= 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
        return 0;
    }
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        ret = 0;
        aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_decrypt;
        switch (bits) {
        case 128:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes128_t4_cbc_decrypt : NULL;
            break;
        case 192:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes192_t4_cbc_decrypt : NULL;
            break;
        case 256:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes256_t4_cbc_decrypt : NULL;
            break;
        default:
            ret = -1;
        }
    } else {
        ret = 0;
        aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_encrypt;
        switch (bits) {
        case 128:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        case 192:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        case 256:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        default:
            ret = -1;
        }
    }

    if (ret < 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}

# define aes_t4_cbc_cipher aes_cbc_cipher
static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_ecb_cipher aes_ecb_cipher
static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_ofb_cipher aes_ofb_cipher
static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_cfb_cipher aes_cfb_cipher
static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_cfb8_cipher aes_cfb8_cipher
static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

# define aes_t4_cfb1_cipher aes_cfb1_cipher
static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

# define aes_t4_ctr_cipher aes_ctr_cipher
static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);

    if (iv == NULL && key == NULL)
        return 1;
    if (key) {
        const int bits = EVP_CIPHER_CTX_get_key_length(ctx) * 8;

        if (bits <= 0) {
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
            return 0;
        }
        aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                           (block128_f) aes_t4_encrypt);
        switch (bits) {
        case 128:
            gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            break;
        case 192:
            gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            break;
        case 256:
            gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
            break;
        default:
            return 0;
        }
        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

# define aes_t4_gcm_cipher aes_gcm_cipher
static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);

    if (!iv && !key)
        return 1;

    if (key) {
        /* The key is two half length keys in reality */
        const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
        const int bytes = keylen / 2;
        const int bits = bytes * 8;

        if (keylen <= 0) {
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
            return 0;
        }
        /*
         * Verify that the two keys are different.
         *
         * This addresses Rogaway's vulnerability.
         * See comment in aes_xts_init_key() below.
         */
        if ((!allow_insecure_decrypt || enc)
                && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
            ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DUPLICATED_KEYS);
            return 0;
        }

        xctx->stream = NULL;
        /* key_len is two AES keys */
        if (enc) {
            aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_encrypt;
            switch (bits) {
            case 128:
                xctx->stream = aes128_t4_xts_encrypt;
                break;
            case 256:
                xctx->stream = aes256_t4_xts_encrypt;
                break;
            default:
                return 0;
            }
        } else {
            aes_t4_set_decrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_decrypt;
            switch (bits) {
            case 128:
                xctx->stream = aes128_t4_xts_decrypt;
                break;
            case 256:
                xctx->stream = aes256_t4_xts_decrypt;
                break;
            default:
                return 0;
            }
        }

        aes_t4_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aes_t4_encrypt;

        xctx->xts.key1 = &xctx->ks1;
    }

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(ctx->iv, iv, 16);
    }

    return 1;
}

# define aes_t4_xts_cipher aes_xts_cipher
static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);

    if (iv == NULL && key == NULL)
        return 1;

    if (key != NULL) {
        const int bits = EVP_CIPHER_CTX_get_key_length(ctx) * 8;

        if (bits <= 0) {
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
            return 0;
        }
        aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aes_t4_encrypt);
        cctx->str = NULL;
        cctx->key_set = 1;
    }
    if (iv) {
        memcpy(ctx->iv, iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}

# define aes_t4_ccm_cipher aes_ccm_cipher
static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# ifndef OPENSSL_NO_OCB
static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

    if (iv == NULL && key == NULL)
        return 1;

    if (key != NULL) {
        const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;

        if (keylen <= 0) {
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
            return 0;
        }
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
            aes_t4_set_encrypt_key(key, keylen, &octx->ksenc.ks);
            aes_t4_set_decrypt_key(key, keylen, &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) aes_t4_encrypt,
                                    (block128_f) aes_t4_decrypt,
                                    NULL))
                return 0;
        }
        while (0);

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}

# define aes_t4_ocb_cipher aes_ocb_cipher
static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# endif /* OPENSSL_NO_OCB */

# ifndef OPENSSL_NO_SIV
#  define aes_t4_siv_init_key aes_siv_init_key
#  define aes_t4_siv_cipher aes_siv_cipher
# endif /* OPENSSL_NO_SIV */

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        EVP_ORIG_GLOBAL, \
        aes_t4_init_key, \
        aes_t4_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize, \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        EVP_ORIG_GLOBAL, \
        aes_init_key, \
        aes_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        EVP_ORIG_GLOBAL, \
        aes_t4_##mode##_init_key, \
        aes_t4_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        EVP_ORIG_GLOBAL, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }

#elif defined(S390X_aes_128_CAPABLE)
/* IBM S390X support */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        /*-
         * KM-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-06)
         */
        struct {
            unsigned char k[32];
        } param;
        /* KM-AES parameter block - end */
    } km;
    unsigned int fc;
} S390X_AES_ECB_CTX;
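
/*
 * The S390X_* context structures below wrap the parameter blocks consumed by
 * the CPACF instructions (KM, KMO, KMF, KMA-GCM, KMAC); the s390x_km*()
 * helpers hand these blocks to the hardware, so the "key schedule" is simply
 * the raw key plus a function code rather than expanded round keys.
 */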

typedef struct {
    union {
        OSSL_UNION_ALIGN;
        /*-
         * KMO-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
         */
        struct {
            unsigned char cv[16];
            unsigned char k[32];
        } param;
        /* KMO-AES parameter block - end */
    } kmo;
    unsigned int fc;

    int res;
} S390X_AES_OFB_CTX;

typedef struct {
    union {
        OSSL_UNION_ALIGN;
        /*-
         * KMF-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
         */
        struct {
            unsigned char cv[16];
            unsigned char k[32];
        } param;
        /* KMF-AES parameter block - end */
    } kmf;
    unsigned int fc;

    int res;
} S390X_AES_CFB_CTX;

typedef struct {
    union {
        OSSL_UNION_ALIGN;
        /*-
         * KMA-GCM-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-11)
         */
        struct {
            unsigned char reserved[12];
            union {
                unsigned int w;
                unsigned char b[4];
            } cv;
            union {
                unsigned long long g[2];
                unsigned char b[16];
            } t;
            unsigned char h[16];
            unsigned long long taadl;
            unsigned long long tpcl;
            union {
                unsigned long long g[2];
                unsigned int w[4];
            } j0;
            unsigned char k[32];
        } param;
        /* KMA-GCM-AES parameter block - end */
    } kma;
    unsigned int fc;
    int key_set;

    unsigned char *iv;
    int ivlen;
    int iv_set;
    int iv_gen;

    int taglen;

    unsigned char ares[16];
    unsigned char mres[16];
    unsigned char kres[16];
    int areslen;
    int mreslen;
    int kreslen;

    int tls_aad_len;
    uint64_t tls_enc_records;   /* Number of TLS records encrypted */
} S390X_AES_GCM_CTX;

typedef struct {
    union {
        OSSL_UNION_ALIGN;
        /*-
         * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
         * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
         * rounds field is used to store the function code and that the key
         * schedule is not stored (if aes hardware support is detected).
         */
        struct {
            unsigned char pad[16];
            AES_KEY k;
        } key;

        struct {
            /*-
             * KMAC-AES parameter block - begin
             * (see z/Architecture Principles of Operation >= SA22-7832-08)
             */
            struct {
                union {
                    unsigned long long g[2];
                    unsigned char b[16];
                } icv;
                unsigned char k[32];
            } kmac_param;
            /* KMAC-AES parameter block - end */

            union {
                unsigned long long g[2];
                unsigned char b[16];
            } nonce;
            union {
                unsigned long long g[2];
                unsigned char b[16];
            } buf;

            unsigned long long blocks;
            int l;
            int m;
            int tls_aad_len;
            int iv_set;
            int tag_set;
            int len_set;
            int key_set;

            unsigned char pad[140];
            unsigned int fc;
        } ccm;
    } aes;
} S390X_AES_CCM_CTX;

# define s390x_aes_init_key aes_init_key
static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc);

# define S390X_AES_CBC_CTX EVP_AES_KEY

# define s390x_aes_cbc_init_key aes_init_key

# define s390x_aes_cbc_cipher aes_cbc_cipher
static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);

static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
{
    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);

    if (keylen <= 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
        return 0;
    }
    cctx->fc = S390X_AES_FC(keylen);
    if (!enc)
        cctx->fc |= S390X_DECRYPT;

    memcpy(cctx->km.param.k, key, keylen);
    return 1;
}

static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);

    s390x_km(in, len, out, cctx->fc, &cctx->km.param);
    return 1;
}

static int s390x_aes_ofb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *ivec, int enc)
{
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
    const unsigned char *iv = ctx->oiv;
    const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);

    if (keylen <= 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
        return 0;
    }
    if (ivlen <= 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_IV_LENGTH);
        return 0;
    }
    memcpy(cctx->kmo.param.cv, iv, ivlen);
    memcpy(cctx->kmo.param.k, key, keylen);
    cctx->fc = S390X_AES_FC(keylen);
    cctx->res = 0;
    return 1;
}

static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
    const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
    unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);
    int n = cctx->res;
    int rem;

    memcpy(cctx->kmo.param.cv, iv, ivlen);
    while (n && len) {
        *out = *in ^ cctx->kmo.param.cv[n];
        n = (n + 1) & 0xf;
        --len;
        ++in;
        ++out;
    }

    rem = len & 0xf;

    len &= ~(size_t)0xf;
    if (len) {
        s390x_kmo(in, len, out, cctx->fc, &cctx->kmo.param);

        out += len;
        in += len;
    }

    if (rem) {
        s390x_km(cctx->kmo.param.cv, 16, cctx->kmo.param.cv, cctx->fc,
                 cctx->kmo.param.k);

        while (rem--) {
            out[n] = in[n] ^ cctx->kmo.param.cv[n];
            ++n;
        }
    }

    memcpy(iv, cctx->kmo.param.cv, ivlen);
    cctx->res = n;
    return 1;
}

static int s390x_aes_cfb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *ivec, int enc)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const unsigned char *iv = ctx->oiv;
    const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);

    if (keylen <= 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
        return 0;
    }
    if (ivlen <= 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_IV_LENGTH);
        return 0;
    }
    cctx->fc = S390X_AES_FC(keylen);
    cctx->fc |= 16 << 24;       /* 16 bytes cipher feedback */
    if (!enc)
        cctx->fc |= S390X_DECRYPT;

    cctx->res = 0;
    memcpy(cctx->kmf.param.cv, iv, ivlen);
    memcpy(cctx->kmf.param.k, key, keylen);
    return 1;
}

static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
    const int enc = EVP_CIPHER_CTX_is_encrypting(ctx);
    const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
    unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);
    int n = cctx->res;
    int rem;
    unsigned char tmp;

    if (keylen <= 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
        return 0;
    }
    if (ivlen <= 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_IV_LENGTH);
        return 0;
    }
    memcpy(cctx->kmf.param.cv, iv, ivlen);
    while (n && len) {
        tmp = *in;
        *out = cctx->kmf.param.cv[n] ^ tmp;
        cctx->kmf.param.cv[n] = enc ? *out : tmp;
        n = (n + 1) & 0xf;
        --len;
        ++in;
        ++out;
    }

    rem = len & 0xf;

    len &= ~(size_t)0xf;
    if (len) {
        s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);

        out += len;
        in += len;
    }

    if (rem) {
        s390x_km(cctx->kmf.param.cv, 16, cctx->kmf.param.cv,
                 S390X_AES_FC(keylen), cctx->kmf.param.k);

        while (rem--) {
            tmp = in[n];
            out[n] = cctx->kmf.param.cv[n] ^ tmp;
            cctx->kmf.param.cv[n] = enc ? out[n] : tmp;
            ++n;
        }
    }

    memcpy(iv, cctx->kmf.param.cv, ivlen);
    cctx->res = n;
    return 1;
}

static int s390x_aes_cfb8_init_key(EVP_CIPHER_CTX *ctx,
                                   const unsigned char *key,
                                   const unsigned char *ivec, int enc)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const unsigned char *iv = ctx->oiv;
    const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);

    if (keylen <= 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
        return 0;
    }
    if (ivlen <= 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_IV_LENGTH);
        return 0;
    }
    cctx->fc = S390X_AES_FC(keylen);
    cctx->fc |= 1 << 24;        /* 1 byte cipher feedback */
    if (!enc)
        cctx->fc |= S390X_DECRYPT;

    memcpy(cctx->kmf.param.cv, iv, ivlen);
    memcpy(cctx->kmf.param.k, key, keylen);
    return 1;
}

static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
    unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);

    memcpy(cctx->kmf.param.cv, iv, ivlen);
    s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
    memcpy(iv, cctx->kmf.param.cv, ivlen);
    return 1;
}

# define s390x_aes_cfb1_init_key aes_init_key

# define s390x_aes_cfb1_cipher aes_cfb1_cipher
static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len);

# define S390X_AES_CTR_CTX EVP_AES_KEY

# define s390x_aes_ctr_init_key aes_init_key

# define s390x_aes_ctr_cipher aes_ctr_cipher
static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);

/* iv + padding length for iv lengths != 12 */
# define S390X_gcm_ivpadlen(i)  ((((i) + 15) >> 4 << 4) + 16)
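
/*
 * Example: S390X_gcm_ivpadlen(12) = S390X_gcm_ivpadlen(16) = 32. The macro
 * rounds the IV length up to a multiple of 16 and adds one extra 16-byte
 * block; the tail of that block carries the 64-bit bit-length of the IV used
 * when ghashing a non-96-bit IV (see EVP_CTRL_AEAD_SET_IVLEN below).
 */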

/*-
 * Process additional authenticated data. Returns 0 on success. Code is
 * big-endian.
 */
static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
                             size_t len)
{
    unsigned long long alen;
    int n, rem;

    if (ctx->kma.param.tpcl)
        return -2;

    alen = ctx->kma.param.taadl + len;
    if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
        return -1;
    ctx->kma.param.taadl = alen;

    n = ctx->areslen;
    if (n) {
        while (n && len) {
            ctx->ares[n] = *aad;
            n = (n + 1) & 0xf;
            ++aad;
            --len;
        }
        /* ctx->ares contains a complete block if offset has wrapped around */
        if (!n) {
            s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
            ctx->fc |= S390X_KMA_HS;
        }
        ctx->areslen = n;
    }

    rem = len & 0xf;

    len &= ~(size_t)0xf;
    if (len) {
        s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
        aad += len;
        ctx->fc |= S390X_KMA_HS;
    }

    if (rem) {
        ctx->areslen = rem;

        do {
            --rem;
            ctx->ares[rem] = aad[rem];
        } while (rem);
    }
    return 0;
}

/*-
 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
 * success. Code is big-endian.
 */
static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
                         unsigned char *out, size_t len)
{
    const unsigned char *inptr;
    unsigned long long mlen;
    union {
        unsigned int w[4];
        unsigned char b[16];
    } buf;
    size_t inlen;
    int n, rem, i;

    mlen = ctx->kma.param.tpcl + len;
    if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
        return -1;
    ctx->kma.param.tpcl = mlen;

    n = ctx->mreslen;
    if (n) {
        inptr = in;
        inlen = len;
        while (n && inlen) {
            ctx->mres[n] = *inptr;
            n = (n + 1) & 0xf;
            ++inptr;
            --inlen;
        }
        /* ctx->mres contains a complete block if offset has wrapped around */
        if (!n) {
            s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
                      ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
            ctx->fc |= S390X_KMA_HS;
            ctx->areslen = 0;

            /* previous call already encrypted/decrypted its remainder,
             * see comment below */
            n = ctx->mreslen;
            while (n) {
                *out = buf.b[n];
                n = (n + 1) & 0xf;
                ++out;
                ++in;
                --len;
            }
            ctx->mreslen = 0;
        }
    }

    rem = len & 0xf;

    len &= ~(size_t)0xf;
    if (len) {
        s390x_kma(ctx->ares, ctx->areslen, in, len, out,
                  ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
        in += len;
        out += len;
        ctx->fc |= S390X_KMA_HS;
        ctx->areslen = 0;
    }

    /*-
     * If there is a remainder, it has to be saved such that it can be
     * processed by kma later. However, we also have to do the for-now
     * unauthenticated encryption/decryption part here and now...
     */
    if (rem) {
        if (!ctx->mreslen) {
            buf.w[0] = ctx->kma.param.j0.w[0];
            buf.w[1] = ctx->kma.param.j0.w[1];
            buf.w[2] = ctx->kma.param.j0.w[2];
            buf.w[3] = ctx->kma.param.cv.w + 1;
            s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);
        }

        n = ctx->mreslen;
        for (i = 0; i < rem; i++) {
            ctx->mres[n + i] = in[i];
            out[i] = in[i] ^ ctx->kres[n + i];
        }

        ctx->mreslen += rem;
    }
    return 0;
}

/*-
 * Initialize context structure. Code is big-endian.
 */
static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx,
                                const unsigned char *iv)
{
    ctx->kma.param.t.g[0] = 0;
    ctx->kma.param.t.g[1] = 0;
    ctx->kma.param.tpcl = 0;
    ctx->kma.param.taadl = 0;
    ctx->mreslen = 0;
    ctx->areslen = 0;
    ctx->kreslen = 0;

    if (ctx->ivlen == 12) {
        memcpy(&ctx->kma.param.j0, iv, ctx->ivlen);
        ctx->kma.param.j0.w[3] = 1;
        ctx->kma.param.cv.w = 1;
    } else {
        /* ctx->iv has the right size and is already padded. */
        memcpy(ctx->iv, iv, ctx->ivlen);
        s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
                  ctx->fc, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;

        ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
        ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
        ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
        ctx->kma.param.t.g[0] = 0;
        ctx->kma.param.t.g[1] = 0;
    }
}

/*-
 * Performs various operations on the context structure depending on control
 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
 * Code is big-endian.
 */
static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
    S390X_AES_GCM_CTX *gctx_out;
    EVP_CIPHER_CTX *out;
    unsigned char *buf;
    int ivlen, enc, len;

    switch (type) {
    case EVP_CTRL_INIT:
        ivlen = EVP_CIPHER_get_iv_length(c->cipher);
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = ivlen;
        gctx->iv = c->iv;
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = gctx->ivlen;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        if (arg <= 0)
            return 0;

        if (arg != 12) {
            len = S390X_gcm_ivpadlen(arg);

            /* Allocate memory for iv if needed. */
            if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
                if (gctx->iv != c->iv)
                    OPENSSL_free(gctx->iv);

                if ((gctx->iv = OPENSSL_malloc(len)) == NULL)
                    return 0;
            }
            /* Add padding. */
            memset(gctx->iv + arg, 0, len - arg - 8);
            *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        buf = EVP_CIPHER_CTX_buf_noconst(c);
        enc = EVP_CIPHER_CTX_is_encrypting(c);
        if (arg <= 0 || arg > 16 || enc)
            return 0;

        memcpy(buf, ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        enc = EVP_CIPHER_CTX_is_encrypting(c);
        if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
            return 0;

        memcpy(ptr, gctx->kma.param.t.b, arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole iv */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;

        if (arg)
            memcpy(gctx->iv, ptr, arg);

        enc = EVP_CIPHER_CTX_is_encrypting(c);
        if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;

        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;

        s390x_aes_gcm_setiv(gctx, gctx->iv);

        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;

        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        enc = EVP_CIPHER_CTX_is_encrypting(c);
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
            return 0;

        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        s390x_aes_gcm_setiv(gctx, gctx->iv);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the aad for later use. */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;

        buf = EVP_CIPHER_CTX_buf_noconst(c);
        memcpy(buf, ptr, arg);
        gctx->tls_aad_len = arg;
        gctx->tls_enc_records = 0;

        len = buf[arg - 2] << 8 | buf[arg - 1];
        /* Correct length for explicit iv. */
        if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
            return 0;
        len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;

        /* If decrypting correct for tag too. */
        enc = EVP_CIPHER_CTX_is_encrypting(c);
        if (!enc) {
            if (len < EVP_GCM_TLS_TAG_LEN)
                return 0;
            len -= EVP_GCM_TLS_TAG_LEN;
        }
        buf[arg - 2] = len >> 8;
        buf[arg - 1] = len & 0xff;
        /* Extra padding: tag appended to record. */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        out = ptr;
        gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);

        if (gctx->iv == c->iv) {
            gctx_out->iv = out->iv;
        } else {
            len = S390X_gcm_ivpadlen(gctx->ivlen);

            if ((gctx_out->iv = OPENSSL_malloc(len)) == NULL)
                return 0;

            memcpy(gctx_out->iv, gctx->iv, len);
        }
        return 1;

    default:
        return -1;
    }
}
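
/*
 * Rough sketch of how an application drives these controls through the EVP
 * layer (non-default IV length, then tag handling on decrypt):
 *
 *     EVP_DecryptInit_ex(c, EVP_aes_256_gcm(), NULL, NULL, NULL);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 16, NULL);
 *     EVP_DecryptInit_ex(c, NULL, NULL, key, iv);
 *     ... EVP_DecryptUpdate() calls ...
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_TAG, 16, tag);
 *     EVP_DecryptFinal_ex(c, out, &outl);
 *
 * which ends up in the EVP_CTRL_AEAD_SET_IVLEN and EVP_CTRL_AEAD_SET_TAG
 * cases above.
 */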

/*-
 * Set key and/or iv. Returns 1 on success. Otherwise 0 is returned.
 */
static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    int keylen;

    if (iv == NULL && key == NULL)
        return 1;

    if (key != NULL) {
        keylen = EVP_CIPHER_CTX_get_key_length(ctx);
        if (keylen <= 0) {
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
            return 0;
        }

        memcpy(&gctx->kma.param.k, key, keylen);

        gctx->fc = S390X_AES_FC(keylen);
        if (!enc)
            gctx->fc |= S390X_DECRYPT;

        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;

        if (iv != NULL) {
            s390x_aes_gcm_setiv(gctx, iv);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        if (gctx->key_set)
            s390x_aes_gcm_setiv(gctx, iv);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);

        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

/*-
 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
 * if successful. Otherwise -1 is returned. Code is big-endian.
 */
static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                    const unsigned char *in, size_t len)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
    const int enc = EVP_CIPHER_CTX_is_encrypting(ctx);
    int rv = -1;

    if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;

    /*
     * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
     * Requirements from SP 800-38D". The requirement is for one party to the
     * communication to fail after 2^64 - 1 keys. We do this on the encrypting
     * side only.
     */
    if (enc && ++gctx->tls_enc_records == 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_TOO_MANY_RECORDS);
        goto err;
    }

    if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN
                                     : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;

    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;

    gctx->kma.param.taadl = gctx->tls_aad_len << 3;
    gctx->kma.param.tpcl = len << 3;
    s390x_kma(buf, gctx->tls_aad_len, in, len, out,
              gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);

    if (enc) {
        memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
                          EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }
err:
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}
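
/*
 * Record layout handled above: a TLS GCM record is
 * 8-byte explicit IV || ciphertext || 16-byte tag, processed in place
 * (out == in). EVP_GCM_TLS_EXPLICIT_IV_LEN and EVP_GCM_TLS_TAG_LEN are 8 and
 * 16 respectively, which is why len is trimmed by 24 bytes before the kma
 * call and the tag is appended (encrypt) or compared (decrypt) at out + len.
 */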

/*-
 * Called from EVP layer to initialize context, process additional
 * authenticated data, en/de-crypt plain/cipher-text and authenticate
 * ciphertext or process a TLS packet, depending on context. Returns bytes
 * written on success. Otherwise -1 is returned. Code is big-endian.
 */
static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    unsigned char *buf, tmp[16];
    int enc;

    if (!gctx->key_set)
        return -1;

    if (gctx->tls_aad_len >= 0)
        return s390x_aes_gcm_tls_cipher(ctx, out, in, len);

    if (!gctx->iv_set)
        return -1;

    if (in != NULL) {
        if (out == NULL) {
            if (s390x_aes_gcm_aad(gctx, in, len))
                return -1;
        } else {
            if (s390x_aes_gcm(gctx, in, out, len))
                return -1;
        }
        return len;
    } else {
        gctx->kma.param.taadl <<= 3;
        gctx->kma.param.tpcl <<= 3;
        s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
                  gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
        /* recall that we already did en-/decrypt gctx->mres
         * and returned it to caller... */
        OPENSSL_cleanse(tmp, gctx->mreslen);
        gctx->iv_set = 0;

        enc = EVP_CIPHER_CTX_is_encrypting(ctx);
        if (enc) {
            gctx->taglen = 16;
        } else {
            if (gctx->taglen < 0)
                return -1;

            buf = EVP_CIPHER_CTX_buf_noconst(ctx);
            if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
                return -1;
        }
        return 0;
    }
}

static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);

    if (gctx == NULL)
        return 0;

    if (gctx->iv != c->iv)
        OPENSSL_free(gctx->iv);

    OPENSSL_cleanse(gctx, sizeof(*gctx));
    return 1;
}

# define S390X_AES_XTS_CTX EVP_AES_XTS_CTX

# define s390x_aes_xts_init_key aes_xts_init_key
static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc);
# define s390x_aes_xts_cipher aes_xts_cipher
static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
# define s390x_aes_xts_ctrl aes_xts_ctrl
static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
# define s390x_aes_xts_cleanup aes_xts_cleanup

/*-
 * Set nonce and length fields. Code is big-endian.
 */
static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
                                       const unsigned char *nonce,
                                       size_t mlen)
{
    ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
    ctx->aes.ccm.nonce.g[1] = mlen;
    memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);
}

/*-
 * Process additional authenticated data. Code is big-endian.
 */
static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,
                              size_t alen)
{
    unsigned char *ptr;
    int i, rem;

    if (!alen)
        return;

    ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;

    /* Suppress 'type-punned pointer dereference' warning. */
    ptr = ctx->aes.ccm.buf.b;

    if (alen < ((1 << 16) - (1 << 8))) {
        *(uint16_t *)ptr = alen;
        i = 2;
    } else if (sizeof(alen) == 8
               && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) {
        *(uint16_t *)ptr = 0xffff;
        *(uint64_t *)(ptr + 2) = alen;
        i = 10;
    } else {
        *(uint16_t *)ptr = 0xfffe;
        *(uint32_t *)(ptr + 2) = alen;
        i = 6;
    }

    while (i < 16 && alen) {
        ctx->aes.ccm.buf.b[i] = *aad;
        ++aad;
        --alen;
        ++i;
    }
    while (i < 16) {
        ctx->aes.ccm.buf.b[i] = 0;
        ++i;
    }

    ctx->aes.ccm.kmac_param.icv.g[0] = 0;
    ctx->aes.ccm.kmac_param.icv.g[1] = 0;
    s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc,
               &ctx->aes.ccm.kmac_param);
    ctx->aes.ccm.blocks += 2;

    rem = alen & 0xf;
    alen &= ~(size_t)0xf;
    if (alen) {
        s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
        ctx->aes.ccm.blocks += alen >> 4;
        aad += alen;
    }
    if (rem) {
        for (i = 0; i < rem; i++)
            ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i];

        s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                 ctx->aes.ccm.kmac_param.k);
        ctx->aes.ccm.blocks++;
    }
}
1870
1871/*-
1872 * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for
1873 * success.
1874 */
1875static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in,
1876 unsigned char *out, size_t len, int enc)
1877{
1878 size_t n, rem;
1879 unsigned int i, l, num;
1880 unsigned char flags;
1881
1882 flags = ctx->aes.ccm.nonce.b[0];
1883 if (!(flags & S390X_CCM_AAD_FLAG)) {
1884 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b,
1885 ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k);
1886 ctx->aes.ccm.blocks++;
1887 }
1888 l = flags & 0x7;
1889 ctx->aes.ccm.nonce.b[0] = l;
1890
1891 /*-
1892 * Reconstruct length from encoded length field
1893 * and initialize it with counter value.
1894 */
1895 n = 0;
1896 for (i = 15 - l; i < 15; i++) {
1897 n |= ctx->aes.ccm.nonce.b[i];
1898 ctx->aes.ccm.nonce.b[i] = 0;
1899 n <<= 8;
1900 }
1901 n |= ctx->aes.ccm.nonce.b[15];
1902 ctx->aes.ccm.nonce.b[15] = 1;
1903
1904 if (n != len)
dd6b2706 1905 return -1; /* length mismatch */
39f5b069
PS
1906
1907 if (enc) {
1908 /* Two operations per block plus one for tag encryption */
1909 ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1;
1910 if (ctx->aes.ccm.blocks > (1ULL << 61))
dd6b2706 1911 return -2; /* too much data */
39f5b069
PS
1912 }
1913
1914 num = 0;
1915 rem = len & 0xf;
25868993 1916 len &= ~(size_t)0xf;
39f5b069
PS
1917
1918 if (enc) {
1919 /* mac-then-encrypt */
1920 if (len)
1921 s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1922 if (rem) {
1923 for (i = 0; i < rem; i++)
1924 ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i];
1925
1926 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1927 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1928 ctx->aes.ccm.kmac_param.k);
1929 }
1930
1931 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
1932 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
1933 &num, (ctr128_f)AES_ctr32_encrypt);
1934 } else {
1935 /* decrypt-then-mac */
1936 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
1937 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
1938 &num, (ctr128_f)AES_ctr32_encrypt);
1939
1940 if (len)
1941 s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1942 if (rem) {
1943 for (i = 0; i < rem; i++)
1944 ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i];
1945
1946 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1947 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1948 ctx->aes.ccm.kmac_param.k);
1949 }
1950 }
1951 /* encrypt tag */
1952 for (i = 15 - l; i < 16; i++)
1953 ctx->aes.ccm.nonce.b[i] = 0;
1954
1955 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc,
1956 ctx->aes.ccm.kmac_param.k);
1957 ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0];
1958 ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1];
1959
dd6b2706 1960 ctx->aes.ccm.nonce.b[0] = flags; /* restore flags field */
39f5b069
PS
1961 return 0;
1962}
1963
1964/*-
1965 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1966 * if successful. Otherwise -1 is returned.
1967 */
1968static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1969 const unsigned char *in, size_t len)
1970{
1971 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
9197c226 1972 unsigned char *ivec = ctx->iv;
39f5b069 1973 unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
ed576acd 1974 const int enc = EVP_CIPHER_CTX_is_encrypting(ctx);
39f5b069
PS
1975
1976 if (out != in
1977 || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m))
1978 return -1;
1979
1980 if (enc) {
1981 /* Set explicit iv (sequence number). */
1982 memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN);
1983 }
1984
1985 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
1986 /*-
1987 * Get explicit iv (sequence number). We already have fixed iv
1988 * (server/client_write_iv) here.
1989 */
1990 memcpy(ivec + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN);
1991 s390x_aes_ccm_setiv(cctx, ivec, len);
1992
1993 /* Process aad (sequence number|type|version|length) */
1994 s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len);
1995
1996 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
1997 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
96530eea 1998
39f5b069
PS
1999 if (enc) {
2000 if (s390x_aes_ccm(cctx, in, out, len, enc))
2001 return -1;
2002
2003 memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
2004 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
2005 } else {
2006 if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
2007 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len,
2008 cctx->aes.ccm.m))
2009 return len;
2010 }
2011
2012 OPENSSL_cleanse(out, len);
2013 return -1;
2014 }
2015}
2016
2017/*-
2018 * Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is
2019 * returned.
2020 */
96530eea
PS
2021static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
2022 const unsigned char *key,
39f5b069
PS
2023 const unsigned char *iv, int enc)
2024{
2025 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
39f5b069
PS
2026 int keylen;
2027
2028 if (iv == NULL && key == NULL)
2029 return 1;
2030
2031 if (key != NULL) {
ed576acd 2032 keylen = EVP_CIPHER_CTX_get_key_length(ctx);
80ce874a
P
2033 if (keylen <= 0) {
2034 ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
2035 return 0;
2036 }
2037
8eb399fb 2038 cctx->aes.ccm.fc = S390X_AES_FC(keylen);
39f5b069
PS
2039 memcpy(cctx->aes.ccm.kmac_param.k, key, keylen);
2040
2041 /* Store encoded m and l. */
2042 cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7)
2043 | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3;
2044 memset(cctx->aes.ccm.nonce.b + 1, 0,
2045 sizeof(cctx->aes.ccm.nonce.b));
2046 cctx->aes.ccm.blocks = 0;
2047
2048 cctx->aes.ccm.key_set = 1;
2049 }
2050
2051 if (iv != NULL) {
9197c226 2052 memcpy(ctx->iv, iv, 15 - cctx->aes.ccm.l);
39f5b069
PS
2053
2054 cctx->aes.ccm.iv_set = 1;
2055 }
2056
2057 return 1;
2058}
2059
2060/*-
2061 * Called from EVP layer to initialize context, process additional
2062 * authenticated data, en/de-crypt plain/cipher-text and authenticate
2063 * plaintext or process a TLS packet, depending on context. Returns bytes
2064 * written on success. Otherwise -1 is returned.
2065 */
96530eea 2066static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
39f5b069
PS
2067 const unsigned char *in, size_t len)
2068{
2069 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
ed576acd 2070 const int enc = EVP_CIPHER_CTX_is_encrypting(ctx);
39f5b069 2071 int rv;
9197c226 2072 unsigned char *buf;
39f5b069
PS
2073
2074 if (!cctx->aes.ccm.key_set)
2075 return -1;
2076
2077 if (cctx->aes.ccm.tls_aad_len >= 0)
2078 return s390x_aes_ccm_tls_cipher(ctx, out, in, len);
2079
2080 /*-
2081 * Final(): Does not return any data. Recall that ccm is mac-then-encrypt
2082 * so integrity must be checked already at Update() i.e., before
2083 * potentially corrupted data is output.
2084 */
2085 if (in == NULL && out != NULL)
2086 return 0;
2087
2088 if (!cctx->aes.ccm.iv_set)
2089 return -1;
2090
39f5b069
PS
2091 if (out == NULL) {
2092 /* Update(): Pass message length. */
2093 if (in == NULL) {
9197c226 2094 s390x_aes_ccm_setiv(cctx, ctx->iv, len);
39f5b069
PS
2095
2096 cctx->aes.ccm.len_set = 1;
2097 return len;
2098 }
2099
2100 /* Update(): Process aad. */
2101 if (!cctx->aes.ccm.len_set && len)
2102 return -1;
2103
2104 s390x_aes_ccm_aad(cctx, in, len);
2105 return len;
2106 }
2107
887e22dd
PS
2108 /* The tag must be set before actually decrypting data */
2109 if (!enc && !cctx->aes.ccm.tag_set)
2110 return -1;
2111
39f5b069
PS
2112 /* Update(): Process message. */
2113
2114 if (!cctx->aes.ccm.len_set) {
2115 /*-
46d08509 2116 * In case message length was not previously set explicitly via
39f5b069
PS
2117 * Update(), set it now.
2118 */
9197c226 2119 s390x_aes_ccm_setiv(cctx, ctx->iv, len);
39f5b069
PS
2120
2121 cctx->aes.ccm.len_set = 1;
2122 }
2123
2124 if (enc) {
2125 if (s390x_aes_ccm(cctx, in, out, len, enc))
2126 return -1;
2127
2128 cctx->aes.ccm.tag_set = 1;
2129 return len;
2130 } else {
2131 rv = -1;
2132
2133 if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
2134 buf = EVP_CIPHER_CTX_buf_noconst(ctx);
2135 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf,
2136 cctx->aes.ccm.m))
2137 rv = len;
2138 }
2139
2140 if (rv == -1)
2141 OPENSSL_cleanse(out, len);
2142
2143 cctx->aes.ccm.iv_set = 0;
2144 cctx->aes.ccm.tag_set = 0;
2145 cctx->aes.ccm.len_set = 0;
2146 return rv;
2147 }
2148}
2149
2150/*-
2151 * Performs various operations on the context structure depending on control
2152 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
2153 * Code is big-endian.
2154 */
2155static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2156{
2157 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c);
9197c226 2158 unsigned char *buf;
39f5b069
PS
2159 int enc, len;
2160
2161 switch (type) {
2162 case EVP_CTRL_INIT:
2163 cctx->aes.ccm.key_set = 0;
2164 cctx->aes.ccm.iv_set = 0;
2165 cctx->aes.ccm.l = 8;
2166 cctx->aes.ccm.m = 12;
2167 cctx->aes.ccm.tag_set = 0;
2168 cctx->aes.ccm.len_set = 0;
2169 cctx->aes.ccm.tls_aad_len = -1;
2170 return 1;
2171
7dddf2fc
SL
2172 case EVP_CTRL_GET_IVLEN:
2173 *(int *)ptr = 15 - cctx->aes.ccm.l;
2174 return 1;
2175
39f5b069
PS
2176 case EVP_CTRL_AEAD_TLS1_AAD:
2177 if (arg != EVP_AEAD_TLS1_AAD_LEN)
2178 return 0;
2179
2180 /* Save the aad for later use. */
2181 buf = EVP_CIPHER_CTX_buf_noconst(c);
2182 memcpy(buf, ptr, arg);
2183 cctx->aes.ccm.tls_aad_len = arg;
2184
03a5e5ae 2185 len = buf[arg - 2] << 8 | buf[arg - 1];
39f5b069
PS
2186 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
2187 return 0;
2188
2189 /* Correct length for explicit iv. */
2190 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
2191
ed576acd 2192 enc = EVP_CIPHER_CTX_is_encrypting(c);
39f5b069
PS
2193 if (!enc) {
2194 if (len < cctx->aes.ccm.m)
2195 return 0;
2196
2197 /* Correct length for tag. */
2198 len -= cctx->aes.ccm.m;
2199 }
2200
03a5e5ae
PS
2201 buf[arg - 2] = len >> 8;
2202 buf[arg - 1] = len & 0xff;
2203
39f5b069
PS
2204 /* Extra padding: tag appended to record. */
2205 return cctx->aes.ccm.m;
2206
2207 case EVP_CTRL_CCM_SET_IV_FIXED:
2208 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
2209 return 0;
2210
2211 /* Copy to first part of the iv. */
9197c226 2212 memcpy(c->iv, ptr, arg);
39f5b069
PS
2213 return 1;
2214
2215 case EVP_CTRL_AEAD_SET_IVLEN:
2216 arg = 15 - arg;
2217 /* fall-through */
2218
2219 case EVP_CTRL_CCM_SET_L:
2220 if (arg < 2 || arg > 8)
2221 return 0;
2222
2223 cctx->aes.ccm.l = arg;
2224 return 1;
2225
2226 case EVP_CTRL_AEAD_SET_TAG:
2227 if ((arg & 1) || arg < 4 || arg > 16)
2228 return 0;
2229
ed576acd 2230 enc = EVP_CIPHER_CTX_is_encrypting(c);
39f5b069
PS
2231 if (enc && ptr)
2232 return 0;
2233
2234 if (ptr) {
2235 cctx->aes.ccm.tag_set = 1;
2236 buf = EVP_CIPHER_CTX_buf_noconst(c);
2237 memcpy(buf, ptr, arg);
2238 }
2239
2240 cctx->aes.ccm.m = arg;
2241 return 1;
2242
2243 case EVP_CTRL_AEAD_GET_TAG:
ed576acd 2244 enc = EVP_CIPHER_CTX_is_encrypting(c);
39f5b069
PS
2245 if (!enc || !cctx->aes.ccm.tag_set)
2246 return 0;
2247
1287dabd 2248 if (arg < cctx->aes.ccm.m)
39f5b069
PS
2249 return 0;
2250
2251 memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
2252 cctx->aes.ccm.tag_set = 0;
2253 cctx->aes.ccm.iv_set = 0;
2254 cctx->aes.ccm.len_set = 0;
2255 return 1;
2256
2257 case EVP_CTRL_COPY:
2258 return 1;
2259
2260 default:
2261 return -1;
2262 }
2263}
2264
96530eea
PS
2265# define s390x_aes_ccm_cleanup aes_ccm_cleanup
2266
2267# ifndef OPENSSL_NO_OCB
dd6b2706 2268# define S390X_AES_OCB_CTX EVP_AES_OCB_CTX
96530eea
PS
2269
2270# define s390x_aes_ocb_init_key aes_ocb_init_key
2271static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2272 const unsigned char *iv, int enc);
2273# define s390x_aes_ocb_cipher aes_ocb_cipher
2274static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2275 const unsigned char *in, size_t len);
2276# define s390x_aes_ocb_cleanup aes_ocb_cleanup
2277static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *);
2278# define s390x_aes_ocb_ctrl aes_ocb_ctrl
2279static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
2280# endif
2281
e74be3d4
RL
2282# ifndef OPENSSL_NO_SIV
2283# define S390X_AES_SIV_CTX EVP_AES_SIV_CTX
e74be3d4
RL
2284
2285# define s390x_aes_siv_init_key aes_siv_init_key
2286# define s390x_aes_siv_cipher aes_siv_cipher
2287# define s390x_aes_siv_cleanup aes_siv_cleanup
2288# define s390x_aes_siv_ctrl aes_siv_ctrl
2289# endif
2290
dd6b2706
P
2291# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode, \
2292 MODE,flags) \
2293static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2294 nid##_##keylen##_##nmode,blocksize, \
2295 keylen / 8, \
2296 ivlen, \
2297 flags | EVP_CIPH_##MODE##_MODE, \
c39352e4 2298 EVP_ORIG_GLOBAL, \
dd6b2706
P
2299 s390x_aes_##mode##_init_key, \
2300 s390x_aes_##mode##_cipher, \
2301 NULL, \
2302 sizeof(S390X_AES_##MODE##_CTX), \
2303 NULL, \
2304 NULL, \
2305 NULL, \
2306 NULL \
2307}; \
2308static const EVP_CIPHER aes_##keylen##_##mode = { \
2309 nid##_##keylen##_##nmode, \
2310 blocksize, \
2311 keylen / 8, \
2312 ivlen, \
2313 flags | EVP_CIPH_##MODE##_MODE, \
c39352e4 2314 EVP_ORIG_GLOBAL, \
dd6b2706
P
2315 aes_init_key, \
2316 aes_##mode##_cipher, \
2317 NULL, \
2318 sizeof(EVP_AES_KEY), \
2319 NULL, \
2320 NULL, \
2321 NULL, \
2322 NULL \
2323}; \
2324const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2325{ \
2326 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2327 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
96530eea
PS
2328}
2329
2330# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\
dd6b2706
P
2331static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2332 nid##_##keylen##_##mode, \
2333 blocksize, \
2334 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
2335 ivlen, \
2336 flags | EVP_CIPH_##MODE##_MODE, \
c39352e4 2337 EVP_ORIG_GLOBAL, \
dd6b2706
P
2338 s390x_aes_##mode##_init_key, \
2339 s390x_aes_##mode##_cipher, \
2340 s390x_aes_##mode##_cleanup, \
2341 sizeof(S390X_AES_##MODE##_CTX), \
2342 NULL, \
2343 NULL, \
2344 s390x_aes_##mode##_ctrl, \
2345 NULL \
2346}; \
2347static const EVP_CIPHER aes_##keylen##_##mode = { \
2348 nid##_##keylen##_##mode,blocksize, \
2349 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
2350 ivlen, \
2351 flags | EVP_CIPH_##MODE##_MODE, \
c39352e4 2352 EVP_ORIG_GLOBAL, \
dd6b2706
P
2353 aes_##mode##_init_key, \
2354 aes_##mode##_cipher, \
2355 aes_##mode##_cleanup, \
2356 sizeof(EVP_AES_##MODE##_CTX), \
2357 NULL, \
2358 NULL, \
2359 aes_##mode##_ctrl, \
2360 NULL \
2361}; \
2362const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2363{ \
2364 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2365 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
96530eea
PS
2366}
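/*-
 * Each EVP_aes_<keylen>_<mode>() entry point generated by the two macros
 * above picks an implementation at runtime: the CPACF-accelerated s390x
 * cipher when the corresponding S390X_aes_<keylen>_<mode>_CAPABLE test
 * succeeds, otherwise the generic software cipher.
 */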
2367
5158c763 2368#else
17f121de 2369
5158c763 2370# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
17f121de 2371static const EVP_CIPHER aes_##keylen##_##mode = { \
0f113f3e
MC
2372 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
2373 flags|EVP_CIPH_##MODE##_MODE, \
f6c95e46 2374 EVP_ORIG_GLOBAL, \
0f113f3e
MC
2375 aes_init_key, \
2376 aes_##mode##_cipher, \
2377 NULL, \
2378 sizeof(EVP_AES_KEY), \
2379 NULL,NULL,NULL,NULL }; \
17f121de
AP
2380const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2381{ return &aes_##keylen##_##mode; }
d1fff483 2382
5158c763 2383# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
17f121de 2384static const EVP_CIPHER aes_##keylen##_##mode = { \
0f113f3e 2385 nid##_##keylen##_##mode,blocksize, \
b1ceb439
TS
2386 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
2387 ivlen, \
0f113f3e 2388 flags|EVP_CIPH_##MODE##_MODE, \
f6c95e46 2389 EVP_ORIG_GLOBAL, \
0f113f3e
MC
2390 aes_##mode##_init_key, \
2391 aes_##mode##_cipher, \
2392 aes_##mode##_cleanup, \
2393 sizeof(EVP_AES_##MODE##_CTX), \
2394 NULL,NULL,aes_##mode##_ctrl,NULL }; \
17f121de
AP
2395const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2396{ return &aes_##keylen##_##mode; }
9575d1a9 2397
5158c763 2398#endif
9575d1a9 2399
5158c763 2400#define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \
0f113f3e
MC
2401 BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2402 BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2403 BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2404 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2405 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags) \
2406 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags) \
2407 BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
d1fff483
AP
2408
2409static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
2410 const unsigned char *iv, int enc)
2411{
2412 int ret, mode;
6435f0f6 2413 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
80ce874a
P
2414 const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
2415
2416 if (keylen <= 0) {
2417 ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
2418 return 0;
2419 }
0f113f3e 2420
ed576acd 2421 mode = EVP_CIPHER_CTX_get_mode(ctx);
0f113f3e 2422 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
c01a3c6d 2423 && !enc) {
5158c763 2424#ifdef HWAES_CAPABLE
0f113f3e 2425 if (HWAES_CAPABLE) {
80ce874a 2426 ret = HWAES_set_decrypt_key(key, keylen, &dat->ks.ks);
0f113f3e
MC
2427 dat->block = (block128_f) HWAES_decrypt;
2428 dat->stream.cbc = NULL;
5158c763 2429# ifdef HWAES_cbc_encrypt
0f113f3e
MC
2430 if (mode == EVP_CIPH_CBC_MODE)
2431 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
0f113f3e 2432# endif
5158c763
MC
2433 } else
2434#endif
2435#ifdef BSAES_CAPABLE
0f113f3e 2436 if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
80ce874a 2437 ret = AES_set_decrypt_key(key, keylen, &dat->ks.ks);
0f113f3e 2438 dat->block = (block128_f) AES_decrypt;
3675334e 2439 dat->stream.cbc = (cbc128_f) ossl_bsaes_cbc_encrypt;
0f113f3e 2440 } else
5158c763
MC
2441#endif
2442#ifdef VPAES_CAPABLE
0f113f3e 2443 if (VPAES_CAPABLE) {
80ce874a 2444 ret = vpaes_set_decrypt_key(key, keylen, &dat->ks.ks);
0f113f3e
MC
2445 dat->block = (block128_f) vpaes_decrypt;
2446 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2447 (cbc128_f) vpaes_cbc_encrypt : NULL;
2448 } else
5158c763 2449#endif
0f113f3e 2450 {
80ce874a 2451 ret = AES_set_decrypt_key(key, keylen, &dat->ks.ks);
0f113f3e
MC
2452 dat->block = (block128_f) AES_decrypt;
2453 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2454 (cbc128_f) AES_cbc_encrypt : NULL;
c01a3c6d 2455 }
0f113f3e 2456 } else
5158c763 2457#ifdef HWAES_CAPABLE
0f113f3e 2458 if (HWAES_CAPABLE) {
80ce874a 2459 ret = HWAES_set_encrypt_key(key, keylen, &dat->ks.ks);
0f113f3e
MC
2460 dat->block = (block128_f) HWAES_encrypt;
2461 dat->stream.cbc = NULL;
5158c763 2462# ifdef HWAES_cbc_encrypt
0f113f3e
MC
2463 if (mode == EVP_CIPH_CBC_MODE)
2464 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2465 else
5158c763
MC
2466# endif
2467# ifdef HWAES_ctr32_encrypt_blocks
0f113f3e
MC
2468 if (mode == EVP_CIPH_CTR_MODE)
2469 dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
2470 else
5158c763 2471# endif
0f113f3e
MC
2472 (void)0; /* terminate potentially open 'else' */
2473 } else
5158c763
MC
2474#endif
2475#ifdef BSAES_CAPABLE
0f113f3e 2476 if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
80ce874a 2477 ret = AES_set_encrypt_key(key, keylen, &dat->ks.ks);
0f113f3e 2478 dat->block = (block128_f) AES_encrypt;
3675334e 2479 dat->stream.ctr = (ctr128_f) ossl_bsaes_ctr32_encrypt_blocks;
0f113f3e 2480 } else
5158c763
MC
2481#endif
2482#ifdef VPAES_CAPABLE
0f113f3e 2483 if (VPAES_CAPABLE) {
80ce874a 2484 ret = vpaes_set_encrypt_key(key, keylen, &dat->ks.ks);
0f113f3e
MC
2485 dat->block = (block128_f) vpaes_encrypt;
2486 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2487 (cbc128_f) vpaes_cbc_encrypt : NULL;
2488 } else
5158c763 2489#endif
0f113f3e 2490 {
80ce874a 2491 ret = AES_set_encrypt_key(key, keylen, &dat->ks.ks);
0f113f3e
MC
2492 dat->block = (block128_f) AES_encrypt;
2493 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2494 (cbc128_f) AES_cbc_encrypt : NULL;
5158c763 2495#ifdef AES_CTR_ASM
0f113f3e
MC
2496 if (mode == EVP_CIPH_CTR_MODE)
2497 dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
5158c763 2498#endif
0f113f3e 2499 }
d1fff483 2500
0f113f3e 2501 if (ret < 0) {
9311d0c4 2502 ERR_raise(ERR_LIB_EVP, EVP_R_AES_KEY_SETUP_FAILED);
0f113f3e
MC
2503 return 0;
2504 }
d1fff483 2505
0f113f3e
MC
2506 return 1;
2507}
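/*-
 * Selection order in aes_init_key() above: the dedicated AES instructions
 * (HWAES_*) are preferred when available, then the bit-sliced
 * implementation (BSAES, used only for CBC decryption and CTR), then the
 * vector-permutation implementation (VPAES), and finally the generic
 * AES_set_*_key()/AES_encrypt()/AES_decrypt() routines.  The block and
 * stream function pointers chosen here are what the aes_*_cipher()
 * routines below dispatch through.
 */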
d1fff483 2508
0f113f3e
MC
2509static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2510 const unsigned char *in, size_t len)
17f121de 2511{
6435f0f6 2512 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
8ca28da0 2513
0f113f3e 2514 if (dat->stream.cbc)
9197c226 2515 (*dat->stream.cbc) (in, out, len, &dat->ks, ctx->iv,
ed576acd
TM
2516 EVP_CIPHER_CTX_is_encrypting(ctx));
2517 else if (EVP_CIPHER_CTX_is_encrypting(ctx))
9197c226
BK
2518 CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, ctx->iv,
2519 dat->block);
0f113f3e 2520 else
6435f0f6 2521 CRYPTO_cbc128_decrypt(in, out, len, &dat->ks,
9197c226 2522 ctx->iv, dat->block);
17f121de 2523
0f113f3e 2524 return 1;
17f121de
AP
2525}
2526
0f113f3e
MC
2527static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2528 const unsigned char *in, size_t len)
17f121de 2529{
ed576acd 2530 size_t bl = EVP_CIPHER_CTX_get_block_size(ctx);
0f113f3e 2531 size_t i;
6435f0f6 2532 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
17f121de 2533
0f113f3e
MC
2534 if (len < bl)
2535 return 1;
17f121de 2536
0f113f3e
MC
2537 for (i = 0, len -= bl; i <= len; i += bl)
2538 (*dat->block) (in + i, out + i, &dat->ks);
17f121de 2539
0f113f3e 2540 return 1;
17f121de 2541}
deb2c1a1 2542
0f113f3e
MC
2543static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2544 const unsigned char *in, size_t len)
17f121de 2545{
6435f0f6 2546 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
8ca28da0 2547
ed576acd 2548 int num = EVP_CIPHER_CTX_get_num(ctx);
0f113f3e 2549 CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
9197c226 2550 ctx->iv, &num, dat->block);
6435f0f6 2551 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e 2552 return 1;
17f121de 2553}
deb2c1a1 2554
0f113f3e
MC
2555static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2556 const unsigned char *in, size_t len)
17f121de 2557{
6435f0f6 2558 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
8ca28da0 2559
ed576acd 2560 int num = EVP_CIPHER_CTX_get_num(ctx);
0f113f3e 2561 CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
9197c226 2562 ctx->iv, &num,
ed576acd 2563 EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
6435f0f6 2564 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e 2565 return 1;
17f121de
AP
2566}
2567
0f113f3e
MC
2568static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2569 const unsigned char *in, size_t len)
17f121de 2570{
6435f0f6 2571 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
8ca28da0 2572
ed576acd 2573 int num = EVP_CIPHER_CTX_get_num(ctx);
0f113f3e 2574 CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
9197c226 2575 ctx->iv, &num,
ed576acd 2576 EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
6435f0f6 2577 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e 2578 return 1;
17f121de 2579}
8d1ebe0b 2580
0f113f3e
MC
2581static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2582 const unsigned char *in, size_t len)
17f121de 2583{
6435f0f6 2584 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
0f113f3e 2585
6435f0f6 2586 if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
ed576acd 2587 int num = EVP_CIPHER_CTX_get_num(ctx);
0f113f3e 2588 CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
9197c226 2589 ctx->iv, &num,
ed576acd 2590 EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
6435f0f6 2591 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e
MC
2592 return 1;
2593 }
2594
2595 while (len >= MAXBITCHUNK) {
ed576acd 2596 int num = EVP_CIPHER_CTX_get_num(ctx);
0f113f3e 2597 CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
9197c226 2598 ctx->iv, &num,
ed576acd 2599 EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
6435f0f6 2600 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e 2601 len -= MAXBITCHUNK;
604e591e
BE
2602 out += MAXBITCHUNK;
2603 in += MAXBITCHUNK;
0f113f3e 2604 }
6435f0f6 2605 if (len) {
ed576acd 2606 int num = EVP_CIPHER_CTX_get_num(ctx);
0f113f3e 2607 CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
9197c226 2608 ctx->iv, &num,
ed576acd 2609 EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
6435f0f6
RL
2610 EVP_CIPHER_CTX_set_num(ctx, num);
2611 }
0f113f3e
MC
2612
2613 return 1;
17f121de 2614}
8d1ebe0b 2615
0f113f3e
MC
2616static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2617 const unsigned char *in, size_t len)
d976f992 2618{
042f8f70
P
2619 int n = EVP_CIPHER_CTX_get_num(ctx);
2620 unsigned int num;
6435f0f6 2621 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
0f113f3e 2622
042f8f70
P
2623 if (n < 0)
2624 return 0;
2625 num = (unsigned int)n;
2626
0f113f3e
MC
2627 if (dat->stream.ctr)
2628 CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
9197c226 2629 ctx->iv,
6435f0f6
RL
2630 EVP_CIPHER_CTX_buf_noconst(ctx),
2631 &num, dat->stream.ctr);
0f113f3e
MC
2632 else
2633 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
9197c226 2634 ctx->iv,
6435f0f6
RL
2635 EVP_CIPHER_CTX_buf_noconst(ctx), &num,
2636 dat->block);
2637 EVP_CIPHER_CTX_set_num(ctx, num);
0f113f3e 2638 return 1;
d976f992
AP
2639}
2640
0f113f3e
MC
2641BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
2642 BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
2643 BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
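/*-
 * The three invocations above instantiate the complete set of non-AEAD AES
 * ciphers for 128-, 192- and 256-bit keys: CBC, ECB, OFB, CFB128, CFB1,
 * CFB8 and CTR, i.e. the EVP_aes_128_cbc() ... EVP_aes_256_ctr() entry
 * points.
 */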
bdaa5415
DSH
2644
2645static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
0f113f3e 2646{
6435f0f6 2647 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
273a0218
BE
2648 if (gctx == NULL)
2649 return 0;
0f113f3e 2650 OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
9197c226 2651 if (gctx->iv != c->iv)
0f113f3e
MC
2652 OPENSSL_free(gctx->iv);
2653 return 1;
2654}
bdaa5415
DSH
2655
2656static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
0f113f3e 2657{
6435f0f6 2658 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
0f113f3e
MC
2659 switch (type) {
2660 case EVP_CTRL_INIT:
2661 gctx->key_set = 0;
2662 gctx->iv_set = 0;
ed576acd 2663 gctx->ivlen = EVP_CIPHER_get_iv_length(c->cipher);
c5307d9c 2664 gctx->iv = c->iv;
0f113f3e
MC
2665 gctx->taglen = -1;
2666 gctx->iv_gen = 0;
2667 gctx->tls_aad_len = -1;
2668 return 1;
2669
7dddf2fc
SL
2670 case EVP_CTRL_GET_IVLEN:
2671 *(int *)ptr = gctx->ivlen;
2672 return 1;
2673
e640fa02 2674 case EVP_CTRL_AEAD_SET_IVLEN:
0f113f3e
MC
2675 if (arg <= 0)
2676 return 0;
2677 /* Allocate memory for IV if needed */
2678 if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
c5307d9c 2679 if (gctx->iv != c->iv)
0f113f3e 2680 OPENSSL_free(gctx->iv);
e077455e 2681 if ((gctx->iv = OPENSSL_malloc(arg)) == NULL)
0f113f3e
MC
2682 return 0;
2683 }
2684 gctx->ivlen = arg;
2685 return 1;
2686
e640fa02 2687 case EVP_CTRL_AEAD_SET_TAG:
c5307d9c 2688 if (arg <= 0 || arg > 16 || c->encrypt)
0f113f3e 2689 return 0;
c5307d9c 2690 memcpy(c->buf, ptr, arg);
0f113f3e
MC
2691 gctx->taglen = arg;
2692 return 1;
2693
e640fa02 2694 case EVP_CTRL_AEAD_GET_TAG:
c5307d9c 2695 if (arg <= 0 || arg > 16 || !c->encrypt
6435f0f6 2696 || gctx->taglen < 0)
0f113f3e 2697 return 0;
c5307d9c 2698 memcpy(ptr, c->buf, arg);
0f113f3e
MC
2699 return 1;
2700
2701 case EVP_CTRL_GCM_SET_IV_FIXED:
2702 /* Special case: -1 length restores whole IV */
2703 if (arg == -1) {
2704 memcpy(gctx->iv, ptr, gctx->ivlen);
2705 gctx->iv_gen = 1;
2706 return 1;
2707 }
2708 /*
2709 * Fixed field must be at least 4 bytes and invocation field at least
2710 * 8.
2711 */
2712 if ((arg < 4) || (gctx->ivlen - arg) < 8)
2713 return 0;
2714 if (arg)
2715 memcpy(gctx->iv, ptr, arg);
c5307d9c 2716 if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
16cfc2c9 2717 return 0;
0f113f3e
MC
2718 gctx->iv_gen = 1;
2719 return 1;
2720
2721 case EVP_CTRL_GCM_IV_GEN:
2722 if (gctx->iv_gen == 0 || gctx->key_set == 0)
2723 return 0;
2724 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2725 if (arg <= 0 || arg > gctx->ivlen)
2726 arg = gctx->ivlen;
2727 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
2728 /*
2729 * Invocation field will be at least 8 bytes in size and so no need
2730 * to check wrap around or increment more than last 8 bytes.
2731 */
2732 ctr64_inc(gctx->iv + gctx->ivlen - 8);
2733 gctx->iv_set = 1;
2734 return 1;
2735
2736 case EVP_CTRL_GCM_SET_IV_INV:
c5307d9c 2737 if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
0f113f3e
MC
2738 return 0;
2739 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
2740 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2741 gctx->iv_set = 1;
2742 return 1;
2743
2744 case EVP_CTRL_AEAD_TLS1_AAD:
2745 /* Save the AAD for later use */
c8269881 2746 if (arg != EVP_AEAD_TLS1_AAD_LEN)
0f113f3e 2747 return 0;
c5307d9c 2748 memcpy(c->buf, ptr, arg);
0f113f3e 2749 gctx->tls_aad_len = arg;
d6b34570 2750 gctx->tls_enc_records = 0;
0f113f3e 2751 {
c5307d9c 2752 unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
0f113f3e 2753 /* Correct length for explicit IV */
2198b3a5
AP
2754 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
2755 return 0;
0f113f3e
MC
2756 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
2757 /* If decrypting correct for tag too */
c5307d9c 2758 if (!c->encrypt) {
2198b3a5
AP
2759 if (len < EVP_GCM_TLS_TAG_LEN)
2760 return 0;
0f113f3e 2761 len -= EVP_GCM_TLS_TAG_LEN;
2198b3a5 2762 }
c5307d9c
AP
2763 c->buf[arg - 2] = len >> 8;
2764 c->buf[arg - 1] = len & 0xff;
0f113f3e
MC
2765 }
2766 /* Extra padding: tag appended to record */
2767 return EVP_GCM_TLS_TAG_LEN;
2768
2769 case EVP_CTRL_COPY:
2770 {
2771 EVP_CIPHER_CTX *out = ptr;
6435f0f6 2772 EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out);
0f113f3e
MC
2773 if (gctx->gcm.key) {
2774 if (gctx->gcm.key != &gctx->ks)
2775 return 0;
2776 gctx_out->gcm.key = &gctx_out->ks;
2777 }
c5307d9c
AP
2778 if (gctx->iv == c->iv)
2779 gctx_out->iv = out->iv;
0f113f3e 2780 else {
e077455e 2781 if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL)
0f113f3e
MC
2782 return 0;
2783 memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
2784 }
2785 return 1;
2786 }
2787
2788 default:
2789 return -1;
2790
2791 }
2792}
bdaa5415
DSH
2793
2794static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
2795 const unsigned char *iv, int enc)
2796{
6435f0f6 2797 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
80ce874a
P
2798
2799 if (iv == NULL && key == NULL)
0f113f3e 2800 return 1;
80ce874a
P
2801
2802 if (key != NULL) {
2803 const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
2804
2805 if (keylen <= 0) {
2806 ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
2807 return 0;
2808 }
0f113f3e 2809 do {
5158c763 2810#ifdef HWAES_CAPABLE
0f113f3e 2811 if (HWAES_CAPABLE) {
80ce874a 2812 HWAES_set_encrypt_key(key, keylen, &gctx->ks.ks);
0f113f3e
MC
2813 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2814 (block128_f) HWAES_encrypt);
5158c763 2815# ifdef HWAES_ctr32_encrypt_blocks
0f113f3e 2816 gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
5158c763 2817# else
0f113f3e 2818 gctx->ctr = NULL;
5158c763 2819# endif
0f113f3e
MC
2820 break;
2821 } else
5158c763
MC
2822#endif
2823#ifdef BSAES_CAPABLE
0f113f3e 2824 if (BSAES_CAPABLE) {
80ce874a 2825 AES_set_encrypt_key(key, keylen, &gctx->ks.ks);
0f113f3e
MC
2826 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2827 (block128_f) AES_encrypt);
3675334e 2828 gctx->ctr = (ctr128_f) ossl_bsaes_ctr32_encrypt_blocks;
0f113f3e
MC
2829 break;
2830 } else
5158c763
MC
2831#endif
2832#ifdef VPAES_CAPABLE
0f113f3e 2833 if (VPAES_CAPABLE) {
80ce874a 2834 vpaes_set_encrypt_key(key, keylen, &gctx->ks.ks);
0f113f3e
MC
2835 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2836 (block128_f) vpaes_encrypt);
2837 gctx->ctr = NULL;
2838 break;
2839 } else
5158c763 2840#endif
0f113f3e
MC
2841 (void)0; /* terminate potentially open 'else' */
2842
80ce874a 2843 AES_set_encrypt_key(key, keylen, &gctx->ks.ks);
0f113f3e
MC
2844 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2845 (block128_f) AES_encrypt);
5158c763 2846#ifdef AES_CTR_ASM
0f113f3e 2847 gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
5158c763 2848#else
0f113f3e 2849 gctx->ctr = NULL;
5158c763 2850#endif
0f113f3e
MC
2851 } while (0);
2852
2853 /*
2854 * If we have an iv we can set it directly, otherwise use the saved IV.
2855 */
2856 if (iv == NULL && gctx->iv_set)
2857 iv = gctx->iv;
2858 if (iv) {
2859 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
2860 gctx->iv_set = 1;
2861 }
2862 gctx->key_set = 1;
2863 } else {
2864 /* If key set use IV, otherwise copy */
2865 if (gctx->key_set)
2866 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
2867 else
2868 memcpy(gctx->iv, iv, gctx->ivlen);
2869 gctx->iv_set = 1;
2870 gctx->iv_gen = 0;
2871 }
2872 return 1;
2873}
2874
2875/*
2876 * Handle TLS GCM packet format. This consists of the last portion of the IV
28dd49fa
DSH
2877 * followed by the payload and finally the tag. On encrypt generate IV,
2878 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload
2879 * and verify tag.
2880 */
2881
2882static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e
MC
2883 const unsigned char *in, size_t len)
2884{
6435f0f6 2885 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
0f113f3e
MC
2886 int rv = -1;
2887 /* Encrypt/decrypt must be performed in place */
2888 if (out != in
2889 || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
2890 return -1;
df443918 2891
d6b34570
P
2892 /*
2893 * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
2894 * Requirements from SP 800-38D". The requirement is for one party to the
2895 * communication to fail after 2^64 - 1 keys. We do this on the encrypting
2896 * side only.
2897 */
b134300a 2898 if (EVP_CIPHER_CTX_is_encrypting(ctx) && ++gctx->tls_enc_records == 0) {
9311d0c4 2899 ERR_raise(ERR_LIB_EVP, EVP_R_TOO_MANY_RECORDS);
d6b34570
P
2900 goto err;
2901 }
2902
0f113f3e
MC
2903 /*
2904 * Set IV from start of buffer or generate IV and write to start of
2905 * buffer.
2906 */
b134300a
TZ
2907 if (EVP_CIPHER_CTX_ctrl(ctx,
2908 EVP_CIPHER_CTX_is_encrypting(ctx) ?
2909 EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
0f113f3e
MC
2910 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
2911 goto err;
2912 /* Use saved AAD */
b134300a
TZ
2913 if (CRYPTO_gcm128_aad(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
2914 gctx->tls_aad_len))
0f113f3e
MC
2915 goto err;
2916 /* Fix buffer and length to point to payload */
2917 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
2918 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
2919 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
b134300a 2920 if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
0f113f3e
MC
2921 /* Encrypt payload */
2922 if (gctx->ctr) {
2923 size_t bulk = 0;
5158c763 2924#if defined(AES_GCM_ASM)
0f113f3e
MC
2925 if (len >= 32 && AES_GCM_ASM(gctx)) {
2926 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
2927 return -1;
2928
2929 bulk = AES_gcm_encrypt(in, out, len,
2930 gctx->gcm.key,
2931 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2932 gctx->gcm.len.u[1] += bulk;
2933 }
5158c763 2934#endif
0f113f3e
MC
2935 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
2936 in + bulk,
2937 out + bulk,
2938 len - bulk, gctx->ctr))
2939 goto err;
2940 } else {
2941 size_t bulk = 0;
5158c763 2942#if defined(AES_GCM_ASM2)
0f113f3e
MC
2943 if (len >= 32 && AES_GCM_ASM2(gctx)) {
2944 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
2945 return -1;
2946
2947 bulk = AES_gcm_encrypt(in, out, len,
2948 gctx->gcm.key,
2949 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2950 gctx->gcm.len.u[1] += bulk;
2951 }
5158c763 2952#endif
0f113f3e
MC
2953 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
2954 in + bulk, out + bulk, len - bulk))
2955 goto err;
2956 }
2957 out += len;
2958 /* Finally write tag */
2959 CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
2960 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
2961 } else {
2962 /* Decrypt */
2963 if (gctx->ctr) {
2964 size_t bulk = 0;
5158c763 2965#if defined(AES_GCM_ASM)
0f113f3e
MC
2966 if (len >= 16 && AES_GCM_ASM(gctx)) {
2967 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
2968 return -1;
2969
2970 bulk = AES_gcm_decrypt(in, out, len,
2971 gctx->gcm.key,
2972 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2973 gctx->gcm.len.u[1] += bulk;
2974 }
5158c763 2975#endif
0f113f3e
MC
2976 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
2977 in + bulk,
2978 out + bulk,
2979 len - bulk, gctx->ctr))
2980 goto err;
2981 } else {
2982 size_t bulk = 0;
5158c763 2983#if defined(AES_GCM_ASM2)
0f113f3e
MC
2984 if (len >= 16 && AES_GCM_ASM2(gctx)) {
2985 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
2986 return -1;
2987
2988 bulk = AES_gcm_decrypt(in, out, len,
2989 gctx->gcm.key,
2990 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2991 gctx->gcm.len.u[1] += bulk;
2992 }
5158c763 2993#endif
0f113f3e
MC
2994 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
2995 in + bulk, out + bulk, len - bulk))
2996 goto err;
2997 }
2998 /* Retrieve tag */
b134300a
TZ
2999 CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
3000 EVP_GCM_TLS_TAG_LEN);
0f113f3e 3001 /* If tag mismatch wipe buffer */
b134300a
TZ
3002 if (CRYPTO_memcmp(EVP_CIPHER_CTX_buf_noconst(ctx), in + len,
3003 EVP_GCM_TLS_TAG_LEN)) {
0f113f3e
MC
3004 OPENSSL_cleanse(out, len);
3005 goto err;
3006 }
3007 rv = len;
3008 }
3009
3010 err:
3011 gctx->iv_set = 0;
3012 gctx->tls_aad_len = -1;
3013 return rv;
3014}
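/*-
 * Layout of the TLS record processed in place by aes_gcm_tls_cipher()
 * above:
 *
 *   [ explicit IV (8 bytes) | payload | GCM tag (16 bytes) ]
 *
 * which is why len must be at least
 * EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN.
 */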
28dd49fa 3015
f844f9eb 3016#ifdef FIPS_MODULE
bcf082d1
SL
3017/*
3018 * See SP800-38D (GCM) Section 8 "Uniqueness requirement on IVS and keys"
3019 *
3020 * See also 8.2.2 RBG-based construction.
3021 * Random construction consists of a free field (which can be NULL) and a
3022 * random field which will use a DRBG that can return at least 96 bits of
3023 * entropy strength. (The DRBG must be seeded by the FIPS module).
3024 */
3025static int aes_gcm_iv_generate(EVP_AES_GCM_CTX *gctx, int offset)
3026{
3027 int sz = gctx->ivlen - offset;
3028
3029 /* Must be at least 96 bits */
3030 if (sz <= 0 || gctx->ivlen < 12)
3031 return 0;
3032
3033 /* Use DRBG to generate random iv */
3034 if (RAND_bytes(gctx->iv + offset, sz) <= 0)
3035 return 0;
3036 return 1;
3037}
f844f9eb 3038#endif /* FIPS_MODULE */
bcf082d1 3039
17f121de 3040static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e
MC
3041 const unsigned char *in, size_t len)
3042{
6435f0f6 3043 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
bcf082d1 3044
0f113f3e
MC
3045 /* If not set up, return error */
3046 if (!gctx->key_set)
3047 return -1;
3048
3049 if (gctx->tls_aad_len >= 0)
3050 return aes_gcm_tls_cipher(ctx, out, in, len);
3051
f844f9eb 3052#ifdef FIPS_MODULE
bcf082d1
SL
3053 /*
3054 * FIPS requires generation of AES-GCM IV's inside the FIPS module.
3055 * The IV can still be set externally (the security policy will state that
3056 * this is not FIPS compliant). There are some applications
3057 * where setting the IV externally is the only option available.
3058 */
3059 if (!gctx->iv_set) {
b134300a 3060 if (!EVP_CIPHER_CTX_is_encrypting(ctx) || !aes_gcm_iv_generate(gctx, 0))
bcf082d1
SL
3061 return -1;
3062 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
3063 gctx->iv_set = 1;
3064 gctx->iv_gen_rand = 1;
3065 }
3066#else
0f113f3e
MC
3067 if (!gctx->iv_set)
3068 return -1;
f844f9eb 3069#endif /* FIPS_MODULE */
bcf082d1 3070
0f113f3e
MC
3071 if (in) {
3072 if (out == NULL) {
3073 if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
3074 return -1;
b134300a 3075 } else if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
0f113f3e
MC
3076 if (gctx->ctr) {
3077 size_t bulk = 0;
5158c763 3078#if defined(AES_GCM_ASM)
0f113f3e
MC
3079 if (len >= 32 && AES_GCM_ASM(gctx)) {
3080 size_t res = (16 - gctx->gcm.mres) % 16;
3081
3082 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
3083 return -1;
3084
3085 bulk = AES_gcm_encrypt(in + res,
3086 out + res, len - res,
3087 gctx->gcm.key, gctx->gcm.Yi.c,
3088 gctx->gcm.Xi.u);
3089 gctx->gcm.len.u[1] += bulk;
3090 bulk += res;
3091 }
5158c763 3092#endif
0f113f3e
MC
3093 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
3094 in + bulk,
3095 out + bulk,
3096 len - bulk, gctx->ctr))
3097 return -1;
3098 } else {
3099 size_t bulk = 0;
5158c763 3100#if defined(AES_GCM_ASM2)
0f113f3e
MC
3101 if (len >= 32 && AES_GCM_ASM2(gctx)) {
3102 size_t res = (16 - gctx->gcm.mres) % 16;
3103
3104 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
3105 return -1;
3106
3107 bulk = AES_gcm_encrypt(in + res,
3108 out + res, len - res,
3109 gctx->gcm.key, gctx->gcm.Yi.c,
3110 gctx->gcm.Xi.u);
3111 gctx->gcm.len.u[1] += bulk;
3112 bulk += res;
3113 }
5158c763 3114#endif
0f113f3e
MC
3115 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
3116 in + bulk, out + bulk, len - bulk))
3117 return -1;
3118 }
3119 } else {
3120 if (gctx->ctr) {
3121 size_t bulk = 0;
5158c763 3122#if defined(AES_GCM_ASM)
0f113f3e
MC
3123 if (len >= 16 && AES_GCM_ASM(gctx)) {
3124 size_t res = (16 - gctx->gcm.mres) % 16;
3125
3126 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3127 return -1;
3128
3129 bulk = AES_gcm_decrypt(in + res,
3130 out + res, len - res,
3131 gctx->gcm.key,
3132 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3133 gctx->gcm.len.u[1] += bulk;
3134 bulk += res;
3135 }
5158c763 3136#endif
0f113f3e
MC
3137 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
3138 in + bulk,
3139 out + bulk,
3140 len - bulk, gctx->ctr))
3141 return -1;
3142 } else {
3143 size_t bulk = 0;
5158c763 3144#if defined(AES_GCM_ASM2)
0f113f3e
MC
3145 if (len >= 16 && AES_GCM_ASM2(gctx)) {
3146 size_t res = (16 - gctx->gcm.mres) % 16;
3147
3148 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3149 return -1;
3150
3151 bulk = AES_gcm_decrypt(in + res,
3152 out + res, len - res,
3153 gctx->gcm.key,
3154 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3155 gctx->gcm.len.u[1] += bulk;
3156 bulk += res;
3157 }
5158c763 3158#endif
0f113f3e
MC
3159 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
3160 in + bulk, out + bulk, len - bulk))
3161 return -1;
3162 }
3163 }
3164 return len;
3165 } else {
b134300a 3166 if (!EVP_CIPHER_CTX_is_encrypting(ctx)) {
0f113f3e
MC
3167 if (gctx->taglen < 0)
3168 return -1;
b134300a
TZ
3169 if (CRYPTO_gcm128_finish(&gctx->gcm,
3170 EVP_CIPHER_CTX_buf_noconst(ctx),
3171 gctx->taglen) != 0)
0f113f3e
MC
3172 return -1;
3173 gctx->iv_set = 0;
3174 return 0;
3175 }
b134300a 3176 CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx), 16);
0f113f3e
MC
3177 gctx->taglen = 16;
3178 /* Don't reuse the IV */
3179 gctx->iv_set = 0;
3180 return 0;
3181 }
3182
3183}
3184
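/*-
 * Illustrative sketch (not part of e_aes.c): how an application drives the
 * GCM code above through the public EVP interface.  The 32-byte key, the
 * 12-byte IV and the helper name are assumptions of the example, not
 * requirements imposed by this file.
 */
static int example_aes_gcm_encrypt(const unsigned char key[32],
                                   const unsigned char iv[12],
                                   const unsigned char *aad, int aadlen,
                                   const unsigned char *pt, int ptlen,
                                   unsigned char *ct, unsigned char tag[16])
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int outl, tmpl, ok = 0;

    if (c == NULL)
        return 0;
    /* Select AES-256-GCM, set the IV length, then supply key and IV. */
    if (EVP_EncryptInit_ex(c, EVP_aes_256_gcm(), NULL, NULL, NULL) <= 0
        || EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL) <= 0
        || EVP_EncryptInit_ex(c, NULL, NULL, key, iv) <= 0)
        goto done;
    /* AAD is passed with a NULL output pointer (see aes_gcm_cipher()). */
    if (aadlen > 0 && EVP_EncryptUpdate(c, NULL, &outl, aad, aadlen) <= 0)
        goto done;
    if (EVP_EncryptUpdate(c, ct, &outl, pt, ptlen) <= 0
        || EVP_EncryptFinal_ex(c, ct + outl, &tmpl) <= 0
        || EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag) <= 0)
        goto done;
    ok = 1;
 done:
    EVP_CIPHER_CTX_free(c);
    return ok;
}
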
5158c763 3185#define CUSTOM_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
0f113f3e
MC
3186 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3187 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
7dddf2fc 3188 | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_CUSTOM_IV_LENGTH)
0f113f3e
MC
3189
3190BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
3191 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
1c5a4e3b 3192BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
0f113f3e 3193 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
1c5a4e3b 3194BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
0f113f3e 3195 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
32a2d8dd
DSH
3196
3197static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
0f113f3e 3198{
2c840201
P
3199 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, c);
3200
0f113f3e
MC
3201 if (type == EVP_CTRL_COPY) {
3202 EVP_CIPHER_CTX *out = ptr;
6435f0f6 3203 EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out);
2c840201 3204
0f113f3e
MC
3205 if (xctx->xts.key1) {
3206 if (xctx->xts.key1 != &xctx->ks1)
3207 return 0;
3208 xctx_out->xts.key1 = &xctx_out->ks1;
3209 }
3210 if (xctx->xts.key2) {
3211 if (xctx->xts.key2 != &xctx->ks2)
3212 return 0;
3213 xctx_out->xts.key2 = &xctx_out->ks2;
3214 }
3215 return 1;
3216 } else if (type != EVP_CTRL_INIT)
3217 return -1;
3218 /* key1 and key2 are used as an indicator both key and IV are set */
3219 xctx->xts.key1 = NULL;
3220 xctx->xts.key2 = NULL;
3221 return 1;
3222}
32a2d8dd
DSH
3223
3224static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
3225 const unsigned char *iv, int enc)
3226{
6435f0f6 3227 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
2c840201 3228
80ce874a 3229 if (iv == NULL && key == NULL)
0f113f3e
MC
3230 return 1;
3231
80ce874a 3232 if (key != NULL) {
0f113f3e 3233 do {
3538b0f7 3234 /* The key is two half length keys in reality */
80ce874a
P
3235 const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
3236 const int bytes = keylen / 2;
3538b0f7
P
3237 const int bits = bytes * 8;
3238
80ce874a
P
3239 if (keylen <= 0) {
3240 ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
3241 return 0;
3242 }
3538b0f7
P
3243 /*
3244 * Verify that the two keys are different.
3245 *
3246 * This addresses the vulnerability described in Rogaway's
3247 * September 2004 paper:
3248 *
3249 * "Efficient Instantiations of Tweakable Blockciphers and
3250 * Refinements to Modes OCB and PMAC".
3251 * (http://web.cs.ucdavis.edu/~rogaway/papers/offsets.pdf)
3252 *
3253 * FIPS 140-2 IG A.9 XTS-AES Key Generation Requirements states
3254 * that:
3255 * "The check for Key_1 != Key_2 shall be done at any place
3256 * BEFORE using the keys in the XTS-AES algorithm to process
3257 * data with them."
3258 */
2c840201
P
3259 if ((!allow_insecure_decrypt || enc)
3260 && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
9311d0c4 3261 ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DUPLICATED_KEYS);
3538b0f7
P
3262 return 0;
3263 }
3264
5158c763 3265#ifdef AES_XTS_ASM
0f113f3e 3266 xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
5158c763 3267#else
0f113f3e 3268 xctx->stream = NULL;
5158c763 3269#endif
0f113f3e 3270 /* key_len is two AES keys */
5158c763 3271#ifdef HWAES_CAPABLE
0f113f3e
MC
3272 if (HWAES_CAPABLE) {
3273 if (enc) {
3538b0f7 3274 HWAES_set_encrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e 3275 xctx->xts.block1 = (block128_f) HWAES_encrypt;
46f047d7
AP
3276# ifdef HWAES_xts_encrypt
3277 xctx->stream = HWAES_xts_encrypt;
3278# endif
0f113f3e 3279 } else {
3538b0f7 3280 HWAES_set_decrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e 3281 xctx->xts.block1 = (block128_f) HWAES_decrypt;
46f047d7
AP
3282# ifdef HWAES_xts_decrypt
3283 xctx->stream = HWAES_xts_decrypt;
3284# endif
0f113f3e
MC
3285 }
3286
3538b0f7 3287 HWAES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
0f113f3e
MC
3288 xctx->xts.block2 = (block128_f) HWAES_encrypt;
3289
3290 xctx->xts.key1 = &xctx->ks1;
3291 break;
3292 } else
5158c763
MC
3293#endif
3294#ifdef BSAES_CAPABLE
0f113f3e 3295 if (BSAES_CAPABLE)
3675334e 3296 xctx->stream = enc ? ossl_bsaes_xts_encrypt : ossl_bsaes_xts_decrypt;
0f113f3e 3297 else
5158c763
MC
3298#endif
3299#ifdef VPAES_CAPABLE
0f113f3e
MC
3300 if (VPAES_CAPABLE) {
3301 if (enc) {
3538b0f7 3302 vpaes_set_encrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e
MC
3303 xctx->xts.block1 = (block128_f) vpaes_encrypt;
3304 } else {
3538b0f7 3305 vpaes_set_decrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e
MC
3306 xctx->xts.block1 = (block128_f) vpaes_decrypt;
3307 }
3308
3538b0f7 3309 vpaes_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
0f113f3e
MC
3310 xctx->xts.block2 = (block128_f) vpaes_encrypt;
3311
3312 xctx->xts.key1 = &xctx->ks1;
3313 break;
3314 } else
5158c763 3315#endif
0f113f3e
MC
3316 (void)0; /* terminate potentially open 'else' */
3317
3318 if (enc) {
3538b0f7 3319 AES_set_encrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e
MC
3320 xctx->xts.block1 = (block128_f) AES_encrypt;
3321 } else {
3538b0f7 3322 AES_set_decrypt_key(key, bits, &xctx->ks1.ks);
0f113f3e
MC
3323 xctx->xts.block1 = (block128_f) AES_decrypt;
3324 }
3325
3538b0f7 3326 AES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
0f113f3e
MC
3327 xctx->xts.block2 = (block128_f) AES_encrypt;
3328
3329 xctx->xts.key1 = &xctx->ks1;
3330 } while (0);
3538b0f7 3331 }
0f113f3e
MC
3332
3333 if (iv) {
3334 xctx->xts.key2 = &xctx->ks2;
9197c226 3335 memcpy(ctx->iv, iv, 16);
0f113f3e
MC
3336 }
3337
3338 return 1;
3339}
32a2d8dd 3340
17f121de 3341static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
0f113f3e
MC
3342 const unsigned char *in, size_t len)
3343{
6435f0f6 3344 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
95eda4f0
P
3345
3346 if (xctx->xts.key1 == NULL
3347 || xctx->xts.key2 == NULL
3348 || out == NULL
3349 || in == NULL
3350 || len < AES_BLOCK_SIZE)
0f113f3e 3351 return 0;
95eda4f0 3352
5516c19b 3353 /*
79c44b4e 3354 * Impose a limit of 2^20 blocks per data unit as specified by
5516c19b
P
3355 * IEEE Std 1619-2018. The earlier and obsolete IEEE Std 1619-2007
3356 * indicated that this was a SHOULD NOT rather than a MUST NOT.
3357 * NIST SP 800-38E mandates the same limit.
3358 */
3359 if (len > XTS_MAX_BLOCKS_PER_DATA_UNIT * AES_BLOCK_SIZE) {
9311d0c4 3360 ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DATA_UNIT_IS_TOO_LARGE);
5516c19b
P
3361 return 0;
3362 }
3363
0f113f3e
MC
3364 if (xctx->stream)
3365 (*xctx->stream) (in, out, len,
6435f0f6 3366 xctx->xts.key1, xctx->xts.key2,
9197c226
BK
3367 ctx->iv);
3368 else if (CRYPTO_xts128_encrypt(&xctx->xts, ctx->iv, in, out, len,
ed576acd 3369 EVP_CIPHER_CTX_is_encrypting(ctx)))
0f113f3e
MC
3370 return 0;
3371 return 1;
3372}
3373
5158c763 3374#define aes_xts_cleanup NULL
0f113f3e 3375
5158c763 3376#define XTS_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
0f113f3e
MC
3377 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3378 | EVP_CIPH_CUSTOM_COPY)
3379
3380BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
1c5a4e3b 3381BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
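/*-
 * Illustrative sketch (not part of e_aes.c): EVP_aes_256_xts() takes a
 * double-length key (two 32-byte halves, rejected by aes_xts_init_key()
 * above if the halves are identical when encrypting) and a 16-byte tweak
 * passed as the IV.  A data unit must be at least AES_BLOCK_SIZE and at
 * most 2^20 blocks long (see aes_xts_cipher()).  The helper name and
 * buffer sizes are assumptions of the example.
 */
static int example_aes_xts_encrypt_unit(const unsigned char key[64],
                                        const unsigned char tweak[16],
                                        const unsigned char *in, int inlen,
                                        unsigned char *out)
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int outl, tmpl, ok = 0;

    if (c == NULL)
        return 0;
    /* Init fails if the two 32-byte key halves are identical (Rogaway check above). */
    if (EVP_EncryptInit_ex(c, EVP_aes_256_xts(), NULL, key, tweak) > 0
        && EVP_EncryptUpdate(c, out, &outl, in, inlen) > 0
        && EVP_EncryptFinal_ex(c, out + outl, &tmpl) > 0)
        ok = 1;
    EVP_CIPHER_CTX_free(c);
    return ok;
}
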
23916810
DSH
3382
3383static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
0f113f3e 3384{
6435f0f6 3385 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c);
0f113f3e
MC
3386 switch (type) {
3387 case EVP_CTRL_INIT:
3388 cctx->key_set = 0;
3389 cctx->iv_set = 0;
3390 cctx->L = 8;
3391 cctx->M = 12;
3392 cctx->tag_set = 0;
3393 cctx->len_set = 0;
e75c5a79
DSH
3394 cctx->tls_aad_len = -1;
3395 return 1;
3396
7dddf2fc
SL
3397 case EVP_CTRL_GET_IVLEN:
3398 *(int *)ptr = 15 - cctx->L;
3399 return 1;
3400
e75c5a79
DSH
3401 case EVP_CTRL_AEAD_TLS1_AAD:
3402 /* Save the AAD for later use */
3403 if (arg != EVP_AEAD_TLS1_AAD_LEN)
3404 return 0;
6435f0f6 3405 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
e75c5a79
DSH
3406 cctx->tls_aad_len = arg;
3407 {
6435f0f6
RL
3408 uint16_t len =
3409 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
3410 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
e75c5a79 3411 /* Correct length for explicit IV */
2198b3a5
AP
3412 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
3413 return 0;
e75c5a79
DSH
3414 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
3415 /* If decrypting correct for tag too */
ed576acd 3416 if (!EVP_CIPHER_CTX_is_encrypting(c)) {
2198b3a5
AP
3417 if (len < cctx->M)
3418 return 0;
e75c5a79 3419 len -= cctx->M;
2198b3a5 3420 }
6435f0f6
RL
3421 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
3422 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
e75c5a79
DSH
3423 }
3424 /* Extra padding: tag appended to record */
3425 return cctx->M;
3426
3427 case EVP_CTRL_CCM_SET_IV_FIXED:
3428 /* Sanity check length */
3429 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
3430 return 0;
3431 /* Just copy to first part of IV */
9197c226 3432 memcpy(c->iv, ptr, arg);
0f113f3e
MC
3433 return 1;
3434
e640fa02 3435 case EVP_CTRL_AEAD_SET_IVLEN:
0f113f3e 3436 arg = 15 - arg;
018fcbec 3437 /* fall thru */
0f113f3e
MC
3438 case EVP_CTRL_CCM_SET_L:
3439 if (arg < 2 || arg > 8)
3440 return 0;
3441 cctx->L = arg;
3442 return 1;
3443
e640fa02 3444 case EVP_CTRL_AEAD_SET_TAG:
0f113f3e
MC
3445 if ((arg & 1) || arg < 4 || arg > 16)
3446 return 0;
ed576acd 3447 if (EVP_CIPHER_CTX_is_encrypting(c) && ptr)
0f113f3e
MC
3448 return 0;
3449 if (ptr) {
3450 cctx->tag_set = 1;
6435f0f6 3451 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
0f113f3e
MC
3452 }
3453 cctx->M = arg;
3454 return 1;
3455
e640fa02 3456 case EVP_CTRL_AEAD_GET_TAG:
ed576acd 3457 if (!EVP_CIPHER_CTX_is_encrypting(c) || !cctx->tag_set)
0f113f3e
MC
3458 return 0;
3459 if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
3460 return 0;
3461 cctx->tag_set = 0;
3462 cctx->iv_set = 0;
3463 cctx->len_set = 0;
3464 return 1;
3465
3466 case EVP_CTRL_COPY:
3467 {
3468 EVP_CIPHER_CTX *out = ptr;
6435f0f6 3469 EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out);
0f113f3e
MC
3470 if (cctx->ccm.key) {
3471 if (cctx->ccm.key != &cctx->ks)
3472 return 0;
3473 cctx_out->ccm.key = &cctx_out->ks;
3474 }
3475 return 1;
3476 }
3477
3478 default:
3479 return -1;
3480
3481 }
3482}
23916810
DSH
3483
3484static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
0f113f3e
MC
3485 const unsigned char *iv, int enc)
3486{
6435f0f6 3487 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
80ce874a
P
3488
3489 if (iv == NULL && key == NULL)
0f113f3e 3490 return 1;
80ce874a
P
3491
3492 if (key != NULL) {
3493 const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
3494
3495 if (keylen <= 0) {
3496 ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
3497 return 0;
3498 }
0f113f3e 3499 do {
5158c763 3500#ifdef HWAES_CAPABLE
0f113f3e 3501 if (HWAES_CAPABLE) {
80ce874a 3502 HWAES_set_encrypt_key(key, keylen, &cctx->ks.ks);
0f113f3e
MC
3503
3504 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3505 &cctx->ks, (block128_f) HWAES_encrypt);
3506 cctx->str = NULL;
3507 cctx->key_set = 1;
3508 break;
3509 } else
5158c763
MC
3510#endif
3511#ifdef VPAES_CAPABLE
0f113f3e 3512 if (VPAES_CAPABLE) {
80ce874a 3513 vpaes_set_encrypt_key(key, keylen, &cctx->ks.ks);
0f113f3e
MC
3514 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3515 &cctx->ks, (block128_f) vpaes_encrypt);
3516 cctx->str = NULL;
3517 cctx->key_set = 1;
3518 break;
3519 }
5158c763 3520#endif
80ce874a 3521 AES_set_encrypt_key(key, keylen, &cctx->ks.ks);
0f113f3e
MC
3522 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3523 &cctx->ks, (block128_f) AES_encrypt);
3524 cctx->str = NULL;
3525 cctx->key_set = 1;
3526 } while (0);
80ce874a
P
3527 }
3528 if (iv != NULL) {
9197c226 3529 memcpy(ctx->iv, iv, 15 - cctx->L);
0f113f3e
MC
3530 cctx->iv_set = 1;
3531 }
3532 return 1;
3533}
23916810 3534
e75c5a79
DSH
3535static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3536 const unsigned char *in, size_t len)
3537{
6435f0f6 3538 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
e75c5a79
DSH
3539 CCM128_CONTEXT *ccm = &cctx->ccm;
3540 /* Encrypt/decrypt must be performed in place */
3541 if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
3542 return -1;
3543 /* If encrypting set explicit IV from sequence number (start of AAD) */
ed576acd 3544 if (EVP_CIPHER_CTX_is_encrypting(ctx))
6435f0f6
RL
3545 memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
3546 EVP_CCM_TLS_EXPLICIT_IV_LEN);
e75c5a79 3547 /* Get rest of IV from explicit IV */
9197c226 3548 memcpy(ctx->iv + EVP_CCM_TLS_FIXED_IV_LEN, in,
6435f0f6 3549 EVP_CCM_TLS_EXPLICIT_IV_LEN);
e75c5a79
DSH
3550 /* Correct length value */
3551 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
9197c226 3552 if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L,
6435f0f6 3553 len))
e75c5a79
DSH
3554 return -1;
3555 /* Use saved AAD */
ed576acd
TM
3556 CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx),
3557 cctx->tls_aad_len);
e75c5a79
DSH
3558 /* Fix buffer to point to payload */
3559 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3560 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
ed576acd 3561 if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
e75c5a79
DSH
3562 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3563 cctx->str) :
3564 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3565 return -1;
3566 if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
3567 return -1;
3568 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3569 } else {
3570 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3571 cctx->str) :
3572 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3573 unsigned char tag[16];
3574 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3575 if (!CRYPTO_memcmp(tag, in + len, cctx->M))
3576 return len;
3577 }
3578 }
3579 OPENSSL_cleanse(out, len);
3580 return -1;
3581 }
3582}
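/*
 * Rough sketch of the in-place TLS record layout handled by
 * aes_ccm_tls_cipher() above, assuming the EVP_CCM_TLS_* sizes from
 * <openssl/evp.h> (4 byte fixed nonce part, 8 byte explicit part):
 *
 *   nonce  = fixed IV (4 bytes, kept in ctx->iv) || explicit IV (8 bytes)
 *   record = explicit IV (8 bytes) || payload (len bytes) || tag (M bytes)
 *
 * On encryption the explicit IV is taken from the sequence number at the
 * start of the saved AAD; on decryption it is read from the front of the
 * record.  In both cases out == in is required.
 */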
3583
17f121de 3584static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3585 const unsigned char *in, size_t len)
3586{
6435f0f6 3587 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3588 CCM128_CONTEXT *ccm = &cctx->ccm;
3589 /* If not set up, return error */
3590 if (!cctx->key_set)
3591 return -1;
3592
3593 if (cctx->tls_aad_len >= 0)
3594 return aes_ccm_tls_cipher(ctx, out, in, len);
3595
3596 /* EVP_*Final() doesn't return any data */
3597 if (in == NULL && out != NULL)
3598 return 0;
3599
e75c5a79 3600 if (!cctx->iv_set)
0f113f3e 3601 return -1;
e75c5a79 3602
3603 if (!out) {
3604 if (!in) {
9197c226 3605 if (CRYPTO_ccm128_setiv(ccm, ctx->iv,
6435f0f6 3606 15 - cctx->L, len))
3607 return -1;
3608 cctx->len_set = 1;
3609 return len;
3610 }
3611 /* If have AAD need message length */
3612 if (!cctx->len_set && len)
3613 return -1;
3614 CRYPTO_ccm128_aad(ccm, in, len);
3615 return len;
3616 }
3617
3618 /* The tag must be set before actually decrypting data */
ed576acd 3619 if (!EVP_CIPHER_CTX_is_encrypting(ctx) && !cctx->tag_set)
3620 return -1;
3621
3622 /* If not set length yet do it */
3623 if (!cctx->len_set) {
9197c226 3624 if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
3625 return -1;
3626 cctx->len_set = 1;
3627 }
ed576acd 3628 if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
3629 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3630 cctx->str) :
3631 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3632 return -1;
3633 cctx->tag_set = 1;
3634 return len;
3635 } else {
3636 int rv = -1;
3637 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3638 cctx->str) :
3639 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3640 unsigned char tag[16];
3641 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3642 if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
3643 cctx->M))
3644 rv = len;
3645 }
3646 }
3647 if (rv == -1)
3648 OPENSSL_cleanse(out, len);
3649 cctx->iv_set = 0;
3650 cctx->tag_set = 0;
3651 cctx->len_set = 0;
3652 return rv;
3653 }
3654}
3655
5158c763 3656#define aes_ccm_cleanup NULL
0f113f3e 3657
3658BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
3659 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3660BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
3661 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3662BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
3663 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
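/*
 * Illustrative sketch (not part of the original file): how an application
 * would typically drive the CCM implementation above through the public
 * EVP interface.  CCM needs the total plaintext length before any AAD is
 * supplied, which is why there is an EVP_EncryptUpdate() call with NULL
 * input/output carrying only the length.  Buffer sizes are assumptions
 * chosen for the example; error checking is omitted.
 */
static int ccm_encrypt_sketch(const unsigned char *key,    /* 16 bytes */
                              const unsigned char *nonce,  /* 12 bytes */
                              const unsigned char *aad, int aad_len,
                              const unsigned char *pt, int pt_len,
                              unsigned char *ct,           /* pt_len bytes */
                              unsigned char tag[16])
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int outl;

    EVP_EncryptInit_ex(c, EVP_aes_128_ccm(), NULL, NULL, NULL);
    /* Nonce length 12 => L = 3; tag length M = 16 (ptr is NULL when encrypting) */
    EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL);
    EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_TAG, 16, NULL);
    EVP_EncryptInit_ex(c, NULL, NULL, key, nonce);
    EVP_EncryptUpdate(c, NULL, &outl, NULL, pt_len);   /* total length first */
    EVP_EncryptUpdate(c, NULL, &outl, aad, aad_len);   /* then the AAD       */
    EVP_EncryptUpdate(c, ct, &outl, pt, pt_len);       /* then the payload   */
    EVP_EncryptFinal_ex(c, ct + outl, &outl);          /* no output for CCM  */
    EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
    EVP_CIPHER_CTX_free(c);
    return pt_len;
}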
3664
3665typedef struct {
3666 union {
39147079 3667 OSSL_UNION_ALIGN;
3668 AES_KEY ks;
3669 } ks;
3670 /* Pointer to the IV (in ctx->iv); NULL until an IV has been set */
3671 unsigned char *iv;
3672} EVP_AES_WRAP_CTX;
3673
3674static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3675 const unsigned char *iv, int enc)
3676{
0341ff97 3677 int len;
6435f0f6 3678 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
3679
3680 if (iv == NULL && key == NULL)
0f113f3e 3681 return 1;
0341ff97 3682 if (key != NULL) {
3683 const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
3684
3685 if (keylen <= 0) {
3686 ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
3687 return 0;
3688 }
ed576acd 3689 if (EVP_CIPHER_CTX_is_encrypting(ctx))
80ce874a 3690 AES_set_encrypt_key(key, keylen, &wctx->ks.ks);
0f113f3e 3691 else
80ce874a 3692 AES_set_decrypt_key(key, keylen, &wctx->ks.ks);
0341ff97 3693 if (iv == NULL)
3694 wctx->iv = NULL;
3695 }
3696 if (iv != NULL) {
3697 if ((len = EVP_CIPHER_CTX_get_iv_length(ctx)) < 0)
3698 return 0;
3699 memcpy(ctx->iv, iv, len);
9197c226 3700 wctx->iv = ctx->iv;
3701 }
3702 return 1;
3703}
3704
3705static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3706 const unsigned char *in, size_t inlen)
3707{
6435f0f6 3708 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
3709 size_t rv;
3710 /* AES wrap with padding has IV length of 4, without padding 8 */
ed576acd 3711 int pad = EVP_CIPHER_CTX_get_iv_length(ctx) == 4;
3712 /* No final operation so always return zero length */
3713 if (!in)
3714 return 0;
3715 /* Input length must always be non-zero */
3716 if (!inlen)
3717 return -1;
3718 /* If decrypting need at least 16 bytes and multiple of 8 */
ed576acd 3719 if (!EVP_CIPHER_CTX_is_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
3720 return -1;
3721 /* If not padding input must be multiple of 8 */
3722 if (!pad && inlen & 0x7)
3723 return -1;
6d777689 3724 if (ossl_is_partially_overlapping(out, in, inlen)) {
9311d0c4 3725 ERR_raise(ERR_LIB_EVP, EVP_R_PARTIALLY_OVERLAPPING);
3726 return 0;
3727 }
0f113f3e 3728 if (!out) {
ed576acd 3729 if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
3730 /* If padding round up to multiple of 8 */
3731 if (pad)
3732 inlen = (inlen + 7) / 8 * 8;
3733 /* 8 byte prefix */
3734 return inlen + 8;
3735 } else {
3736 /*
3737 * If not padding output will be exactly 8 bytes smaller than
3738 * input. If padding it will be at least 8 bytes smaller but we
3739 * don't know how much.
3740 */
3741 return inlen - 8;
3742 }
3743 }
3744 if (pad) {
ed576acd 3745 if (EVP_CIPHER_CTX_is_encrypting(ctx))
3746 rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
3747 out, in, inlen,
3748 (block128_f) AES_encrypt);
3749 else
3750 rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
3751 out, in, inlen,
3752 (block128_f) AES_decrypt);
3753 } else {
ed576acd 3754 if (EVP_CIPHER_CTX_is_encrypting(ctx))
3755 rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
3756 out, in, inlen, (block128_f) AES_encrypt);
3757 else
3758 rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
3759 out, in, inlen, (block128_f) AES_decrypt);
3760 }
3761 return rv ? (int)rv : -1;
3762}
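/*
 * Illustrative sketch (not part of the original file): wrapping a key
 * through the EVP interface built on aes_wrap_cipher() above.  Wrap-mode
 * ciphers are rejected by the EVP layer unless the caller opts in with
 * EVP_CIPHER_CTX_FLAG_WRAP_ALLOW.  With no IV supplied the RFC 3394
 * default IV is used, the key data length must be a multiple of 8 bytes
 * for this non-pad variant, and the output is the input length plus an
 * 8-byte prefix.  Error checking is omitted.
 */
static int wrap_key_sketch(const unsigned char kek[32],
                           const unsigned char *keydata, int keylen,
                           unsigned char *out /* keylen + 8 bytes */)
{
    EVP_CIPHER_CTX *wctx = EVP_CIPHER_CTX_new();
    int outl = 0, tmplen = 0;

    EVP_CIPHER_CTX_set_flags(wctx, EVP_CIPHER_CTX_FLAG_WRAP_ALLOW);
    EVP_EncryptInit_ex(wctx, EVP_aes_256_wrap(), NULL, kek, NULL);
    EVP_EncryptUpdate(wctx, out, &outl, keydata, keylen);
    EVP_EncryptFinal_ex(wctx, out + outl, &tmplen);
    EVP_CIPHER_CTX_free(wctx);
    return outl + tmplen;
}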
3763
5158c763 3764#define WRAP_FLAGS (EVP_CIPH_WRAP_MODE \
3765 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3766 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)
3767
3768static const EVP_CIPHER aes_128_wrap = {
0f113f3e 3769 NID_id_aes128_wrap,
f6c95e46 3770 8, 16, 8, WRAP_FLAGS, EVP_ORIG_GLOBAL,
3771 aes_wrap_init_key, aes_wrap_cipher,
3772 NULL,
3773 sizeof(EVP_AES_WRAP_CTX),
3774 NULL, NULL, NULL, NULL
3775};
3776
3777const EVP_CIPHER *EVP_aes_128_wrap(void)
3778{
3779 return &aes_128_wrap;
3780}
3781
3782static const EVP_CIPHER aes_192_wrap = {
0f113f3e 3783 NID_id_aes192_wrap,
f6c95e46 3784 8, 24, 8, WRAP_FLAGS, EVP_ORIG_GLOBAL,
3785 aes_wrap_init_key, aes_wrap_cipher,
3786 NULL,
3787 sizeof(EVP_AES_WRAP_CTX),
3788 NULL, NULL, NULL, NULL
3789};
3790
3791const EVP_CIPHER *EVP_aes_192_wrap(void)
3792{
3793 return &aes_192_wrap;
3794}
3795
3796static const EVP_CIPHER aes_256_wrap = {
0f113f3e 3797 NID_id_aes256_wrap,
f6c95e46 3798 8, 32, 8, WRAP_FLAGS, EVP_ORIG_GLOBAL,
3799 aes_wrap_init_key, aes_wrap_cipher,
3800 NULL,
3801 sizeof(EVP_AES_WRAP_CTX),
3802 NULL, NULL, NULL, NULL
3803};
3804
3805const EVP_CIPHER *EVP_aes_256_wrap(void)
3806{
3807 return &aes_256_wrap;
3808}
97cf1f6c 3809
d31fed73 3810static const EVP_CIPHER aes_128_wrap_pad = {
0f113f3e 3811 NID_id_aes128_wrap_pad,
f6c95e46 3812 8, 16, 4, WRAP_FLAGS, EVP_ORIG_GLOBAL,
3813 aes_wrap_init_key, aes_wrap_cipher,
3814 NULL,
3815 sizeof(EVP_AES_WRAP_CTX),
3816 NULL, NULL, NULL, NULL
3817};
3818
3819const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
3820{
3821 return &aes_128_wrap_pad;
3822}
3823
3824static const EVP_CIPHER aes_192_wrap_pad = {
0f113f3e 3825 NID_id_aes192_wrap_pad,
f6c95e46 3826 8, 24, 4, WRAP_FLAGS, EVP_ORIG_GLOBAL,
3827 aes_wrap_init_key, aes_wrap_cipher,
3828 NULL,
3829 sizeof(EVP_AES_WRAP_CTX),
3830 NULL, NULL, NULL, NULL
3831};
3832
3833const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
3834{
3835 return &aes_192_wrap_pad;
3836}
3837
3838static const EVP_CIPHER aes_256_wrap_pad = {
0f113f3e 3839 NID_id_aes256_wrap_pad,
f6c95e46 3840 8, 32, 4, WRAP_FLAGS, EVP_ORIG_GLOBAL,
3841 aes_wrap_init_key, aes_wrap_cipher,
3842 NULL,
3843 sizeof(EVP_AES_WRAP_CTX),
3844 NULL, NULL, NULL, NULL
3845};
3846
3847const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
3848{
3849 return &aes_256_wrap_pad;
3850}
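/*
 * Note on the *_wrap_pad variants above (editorial sketch, not from the
 * original source): they implement RFC 5649 key wrap with padding, use the
 * 4-byte alternative IV (hence the IV length of 4 in the EVP_CIPHER
 * definitions) and accept input that is not a multiple of 8 bytes.  The
 * wrapped output is the input rounded up to a multiple of 8 plus an 8-byte
 * prefix.  They are driven through EVP exactly like the non-pad sketch
 * above, only with EVP_aes_128/192/256_wrap_pad().
 */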
d31fed73 3851
5158c763 3852#ifndef OPENSSL_NO_OCB
e6b336ef 3853static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
0f113f3e 3854{
6435f0f6 3855 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
3856 EVP_CIPHER_CTX *newc;
3857 EVP_AES_OCB_CTX *new_octx;
3858
3859 switch (type) {
3860 case EVP_CTRL_INIT:
3861 octx->key_set = 0;
3862 octx->iv_set = 0;
ed576acd 3863 octx->ivlen = EVP_CIPHER_get_iv_length(c->cipher);
9197c226 3864 octx->iv = c->iv;
3865 octx->taglen = 16;
3866 octx->data_buf_len = 0;
3867 octx->aad_buf_len = 0;
3868 return 1;
3869
3870 case EVP_CTRL_GET_IVLEN:
3871 *(int *)ptr = octx->ivlen;
3872 return 1;
3873
e640fa02 3874 case EVP_CTRL_AEAD_SET_IVLEN:
3875 /* IV len must be 1 to 15 */
3876 if (arg <= 0 || arg > 15)
3877 return 0;
3878
3879 octx->ivlen = arg;
3880 return 1;
3881
e640fa02 3882 case EVP_CTRL_AEAD_SET_TAG:
12a765a5 3883 if (ptr == NULL) {
3884 /* Tag len must be 0 to 16 */
3885 if (arg < 0 || arg > 16)
3886 return 0;
3887
3888 octx->taglen = arg;
3889 return 1;
3890 }
ed576acd 3891 if (arg != octx->taglen || EVP_CIPHER_CTX_is_encrypting(c))
3892 return 0;
3893 memcpy(octx->tag, ptr, arg);
3894 return 1;
3895
e640fa02 3896 case EVP_CTRL_AEAD_GET_TAG:
ed576acd 3897 if (arg != octx->taglen || !EVP_CIPHER_CTX_is_encrypting(c))
3898 return 0;
3899
3900 memcpy(ptr, octx->tag, arg);
3901 return 1;
3902
3903 case EVP_CTRL_COPY:
3904 newc = (EVP_CIPHER_CTX *)ptr;
6435f0f6 3905 new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc);
0f113f3e 3906 return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
3907 &new_octx->ksenc.ks,
3908 &new_octx->ksdec.ks);
3909
3910 default:
3911 return -1;
3912
3913 }
3914}
3915
3916static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3917 const unsigned char *iv, int enc)
3918{
6435f0f6 3919 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
3920
3921 if (iv == NULL && key == NULL)
0f113f3e 3922 return 1;
3923
3924 if (key != NULL) {
3925 const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
3926
3927 if (keylen <= 0) {
3928 ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
3929 return 0;
3930 }
3931 do {
3932 /*
3933 * We set both the encrypt and decrypt key here because decrypt
3934 * needs both. We could possibly optimise to remove setting the
3935 * decrypt for an encryption operation.
3936 */
5158c763 3937# ifdef HWAES_CAPABLE
02dc0b82 3938 if (HWAES_CAPABLE) {
3939 HWAES_set_encrypt_key(key, keylen, &octx->ksenc.ks);
3940 HWAES_set_decrypt_key(key, keylen, &octx->ksdec.ks);
3941 if (!CRYPTO_ocb128_init(&octx->ocb,
3942 &octx->ksenc.ks, &octx->ksdec.ks,
3943 (block128_f) HWAES_encrypt,
3944 (block128_f) HWAES_decrypt,
3945 enc ? HWAES_ocb_encrypt
3946 : HWAES_ocb_decrypt))
3947 return 0;
3948 break;
3949 }
3950# endif
3951# ifdef VPAES_CAPABLE
0f113f3e 3952 if (VPAES_CAPABLE) {
3953 vpaes_set_encrypt_key(key, keylen, &octx->ksenc.ks);
3954 vpaes_set_decrypt_key(key, keylen, &octx->ksdec.ks);
3955 if (!CRYPTO_ocb128_init(&octx->ocb,
3956 &octx->ksenc.ks, &octx->ksdec.ks,
3957 (block128_f) vpaes_encrypt,
3958 (block128_f) vpaes_decrypt,
3959 NULL))
3960 return 0;
3961 break;
3962 }
5158c763 3963# endif
3964 AES_set_encrypt_key(key, keylen, &octx->ksenc.ks);
3965 AES_set_decrypt_key(key, keylen, &octx->ksdec.ks);
3966 if (!CRYPTO_ocb128_init(&octx->ocb,
3967 &octx->ksenc.ks, &octx->ksdec.ks,
0f113f3e 3968 (block128_f) AES_encrypt,
3969 (block128_f) AES_decrypt,
3970 NULL))
3971 return 0;
3972 }
3973 while (0);
3974
3975 /*
3976 * If we have an iv we can set it directly, otherwise use saved IV.
3977 */
3978 if (iv == NULL && octx->iv_set)
3979 iv = octx->iv;
3980 if (iv) {
3981 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
3982 != 1)
3983 return 0;
3984 octx->iv_set = 1;
3985 }
3986 octx->key_set = 1;
3987 } else {
3988 /* If key set use IV, otherwise copy */
3989 if (octx->key_set)
3990 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
3991 else
3992 memcpy(octx->iv, iv, octx->ivlen);
3993 octx->iv_set = 1;
3994 }
3995 return 1;
3996}
3997
3998static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3999 const unsigned char *in, size_t len)
4000{
4001 unsigned char *buf;
4002 int *buf_len;
4003 int written_len = 0;
4004 size_t trailing_len;
6435f0f6 4005 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
4006
4007 /* If IV or Key not set then return error */
4008 if (!octx->iv_set)
4009 return -1;
4010
4011 if (!octx->key_set)
4012 return -1;
4013
0ba5a9ea 4014 if (in != NULL) {
4015 /*
4016 * Need to ensure we are only passing full blocks to low level OCB
4017 * routines. We do it here rather than in EVP_EncryptUpdate/
4018 * EVP_DecryptUpdate because we need to pass full blocks of AAD too
4019 * and those routines don't support that
4020 */
4021
4022 /* Are we dealing with AAD or normal data here? */
4023 if (out == NULL) {
4024 buf = octx->aad_buf;
4025 buf_len = &(octx->aad_buf_len);
4026 } else {
4027 buf = octx->data_buf;
4028 buf_len = &(octx->data_buf_len);
7141ba31 4029
6d777689 4030 if (ossl_is_partially_overlapping(out + *buf_len, in, len)) {
9311d0c4 4031 ERR_raise(ERR_LIB_EVP, EVP_R_PARTIALLY_OVERLAPPING);
4032 return 0;
4033 }
4034 }
4035
4036 /*
4037 * If we've got a partially filled buffer from a previous call then
4038 * use that data first
4039 */
0ba5a9ea 4040 if (*buf_len > 0) {
4041 unsigned int remaining;
4042
0ba5a9ea 4043 remaining = AES_BLOCK_SIZE - (*buf_len);
4044 if (remaining > len) {
4045 memcpy(buf + (*buf_len), in, len);
4046 *(buf_len) += len;
4047 return 0;
4048 }
4049 memcpy(buf + (*buf_len), in, remaining);
4050
4051 /*
4052 * If we get here we've filled the buffer, so process it
4053 */
4054 len -= remaining;
4055 in += remaining;
4056 if (out == NULL) {
0ba5a9ea 4057 if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
0f113f3e 4058 return -1;
ed576acd 4059 } else if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
4060 if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
4061 AES_BLOCK_SIZE))
4062 return -1;
4063 } else {
4064 if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
4065 AES_BLOCK_SIZE))
4066 return -1;
4067 }
0ba5a9ea 4068 written_len = AES_BLOCK_SIZE;
0f113f3e 4069 *buf_len = 0;
4070 if (out != NULL)
4071 out += AES_BLOCK_SIZE;
4072 }
4073
4074 /* Do we have a partial block to handle at the end? */
0ba5a9ea 4075 trailing_len = len % AES_BLOCK_SIZE;
4076
4077 /*
4078 * If we've got some full blocks to handle, then process these first
4079 */
4080 if (len != trailing_len) {
4081 if (out == NULL) {
4082 if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
4083 return -1;
ed576acd 4084 } else if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
4085 if (!CRYPTO_ocb128_encrypt
4086 (&octx->ocb, in, out, len - trailing_len))
4087 return -1;
4088 } else {
4089 if (!CRYPTO_ocb128_decrypt
4090 (&octx->ocb, in, out, len - trailing_len))
4091 return -1;
4092 }
4093 written_len += len - trailing_len;
4094 in += len - trailing_len;
4095 }
4096
4097 /* Handle any trailing partial block */
0ba5a9ea 4098 if (trailing_len > 0) {
4099 memcpy(buf, in, trailing_len);
4100 *buf_len = trailing_len;
4101 }
4102
4103 return written_len;
4104 } else {
4105 /*
4106 * First of all empty the buffer of any partial block that we might
4107 * have been provided - both for data and AAD
4108 */
0ba5a9ea 4109 if (octx->data_buf_len > 0) {
ed576acd 4110 if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
4111 if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
4112 octx->data_buf_len))
4113 return -1;
4114 } else {
4115 if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
4116 octx->data_buf_len))
4117 return -1;
4118 }
4119 written_len = octx->data_buf_len;
4120 octx->data_buf_len = 0;
4121 }
0ba5a9ea 4122 if (octx->aad_buf_len > 0) {
4123 if (!CRYPTO_ocb128_aad
4124 (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
4125 return -1;
4126 octx->aad_buf_len = 0;
4127 }
4128 /* If decrypting then verify */
ed576acd 4129 if (!EVP_CIPHER_CTX_is_encrypting(ctx)) {
4130 if (octx->taglen < 0)
4131 return -1;
4132 if (CRYPTO_ocb128_finish(&octx->ocb,
4133 octx->tag, octx->taglen) != 0)
4134 return -1;
4135 octx->iv_set = 0;
4136 return written_len;
4137 }
4138 /* If encrypting then just get the tag */
4139 if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
4140 return -1;
4141 /* Don't reuse the IV */
4142 octx->iv_set = 0;
4143 return written_len;
4144 }
4145}
4146
4147static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
0f113f3e 4148{
6435f0f6 4149 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
4150 CRYPTO_ocb128_cleanup(&octx->ocb);
4151 return 1;
4152}
e6b336ef 4153
4154BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
4155 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4156BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
4157 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4158BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
4159 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
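/*
 * Illustrative sketch (not part of the original file): the EVP-level flow
 * for the OCB ciphers registered above.  Unlike CCM, OCB does not need the
 * total length up front; AAD is passed with a NULL output buffer and the
 * tag is collected after EVP_EncryptFinal_ex() (on decryption the expected
 * tag is supplied with EVP_CTRL_AEAD_SET_TAG before EVP_DecryptFinal_ex()).
 * Buffer sizes are assumptions chosen for the example; error checking is
 * omitted.
 */
static int ocb_encrypt_sketch(const unsigned char *key,   /* 16 bytes */
                              const unsigned char *iv,    /* 12 bytes */
                              const unsigned char *aad, int aad_len,
                              const unsigned char *pt, int pt_len,
                              unsigned char *ct,          /* pt_len bytes */
                              unsigned char tag[16])
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int outl, total;

    EVP_EncryptInit_ex(c, EVP_aes_128_ocb(), NULL, key, iv);
    EVP_EncryptUpdate(c, NULL, &outl, aad, aad_len);   /* AAD only           */
    EVP_EncryptUpdate(c, ct, &outl, pt, pt_len);       /* may buffer a block */
    total = outl;
    EVP_EncryptFinal_ex(c, ct + total, &outl);         /* flushes the buffer */
    total += outl;
    EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
    EVP_CIPHER_CTX_free(c);
    return total;
}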
5158c763 4160#endif /* OPENSSL_NO_OCB */