/*
 * Copyright 2001-2018 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the OpenSSL license (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

#include <openssl/opensslconf.h>
#include <openssl/crypto.h>
#include <openssl/evp.h>
#include <openssl/err.h>
#include <string.h>
#include <assert.h>
#include <openssl/aes.h>
#include "internal/evp_int.h"
#include "modes_lcl.h"
#include <openssl/rand.h>
#include <openssl/rand_drbg.h>
#include "evp_locl.h"

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks;
    block128_f block;
    union {
        cbc128_f cbc;
        ctr128_f ctr;
    } stream;
} EVP_AES_KEY;

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    GCM128_CONTEXT gcm;
    unsigned char *iv;          /* Temporary IV store */
    int ivlen;                  /* IV length */
    int taglen;
    int iv_gen;                 /* It is OK to generate IVs */
    int tls_aad_len;            /* TLS AAD length */
    ctr128_f ctr;
} EVP_AES_GCM_CTX;

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks1, ks2;                 /* AES key schedules to use */
    XTS128_CONTEXT xts;
    void (*stream) (const unsigned char *in,
                    unsigned char *out, size_t length,
                    const AES_KEY *key1, const AES_KEY *key2,
                    const unsigned char iv[16]);
} EVP_AES_XTS_CTX;

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    int tag_set;                /* Set if tag is valid */
    int len_set;                /* Set if message length set */
    int L, M;                   /* L and M parameters from RFC3610 */
    int tls_aad_len;            /* TLS AAD length */
    CCM128_CONTEXT ccm;
    ccm128_f str;
} EVP_AES_CCM_CTX;

#ifndef OPENSSL_NO_OCB
typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ksenc;                    /* AES key schedule to use for encryption */
    union {
        double align;
        AES_KEY ks;
    } ksdec;                    /* AES key schedule to use for decryption */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    OCB128_CONTEXT ocb;
    unsigned char *iv;          /* Temporary IV store */
    unsigned char tag[16];
    unsigned char data_buf[16]; /* Store partial data blocks */
    unsigned char aad_buf[16];  /* Store partial AAD blocks */
    int data_buf_len;
    int aad_buf_len;
    int ivlen;                  /* IV length */
    int taglen;
} EVP_AES_OCB_CTX;
#endif

#define MAXBITCHUNK ((size_t)1<<(sizeof(size_t)*8-4))
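
/*
 * Illustrative arithmetic: with a 64-bit size_t, MAXBITCHUNK is
 * (size_t)1 << 60; with a 32-bit size_t, 1 << 28. The bit-oriented CFB1
 * code later in this file processes at most MAXBITCHUNK bytes per call,
 * presumably so that the bit count len * 8 can never overflow a size_t.
 */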

#ifdef VPAES_ASM
int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);
int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);

void vpaes_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void vpaes_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);

void vpaes_cbc_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key, unsigned char *ivec, int enc);
#endif
#ifdef BSAES_ASM
void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
                       size_t length, const AES_KEY *key,
                       unsigned char ivec[16], int enc);
void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
                                size_t len, const AES_KEY *key,
                                const unsigned char ivec[16]);
void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
#endif
#ifdef AES_CTR_ASM
void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const AES_KEY *key,
                       const unsigned char ivec[AES_BLOCK_SIZE]);
#endif
#ifdef AES_XTS_ASM
void AES_xts_encrypt(const unsigned char *inp, unsigned char *out, size_t len,
                     const AES_KEY *key1, const AES_KEY *key2,
                     const unsigned char iv[16]);
void AES_xts_decrypt(const unsigned char *inp, unsigned char *out, size_t len,
                     const AES_KEY *key1, const AES_KEY *key2,
                     const unsigned char iv[16]);
#endif

#if defined(OPENSSL_CPUID_OBJ) && (defined(__powerpc__) || defined(__ppc__) || defined(_ARCH_PPC))
# include "ppc_arch.h"
# ifdef VPAES_ASM
#  define VPAES_CAPABLE (OPENSSL_ppccap_P & PPC_ALTIVEC)
# endif
# define HWAES_CAPABLE  (OPENSSL_ppccap_P & PPC_CRYPTO207)
# define HWAES_set_encrypt_key aes_p8_set_encrypt_key
# define HWAES_set_decrypt_key aes_p8_set_decrypt_key
# define HWAES_encrypt aes_p8_encrypt
# define HWAES_decrypt aes_p8_decrypt
# define HWAES_cbc_encrypt aes_p8_cbc_encrypt
# define HWAES_ctr32_encrypt_blocks aes_p8_ctr32_encrypt_blocks
# define HWAES_xts_encrypt aes_p8_xts_encrypt
# define HWAES_xts_decrypt aes_p8_xts_decrypt
#endif

#if defined(AES_ASM) && !defined(I386_ONLY) && (  \
        ((defined(__i386) || defined(__i386__) || \
          defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2)) || \
        defined(__x86_64) || defined(__x86_64__) || \
        defined(_M_AMD64) || defined(_M_X64))

extern unsigned int OPENSSL_ia32cap_P[];

# ifdef VPAES_ASM
#  define VPAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
# endif
# ifdef BSAES_ASM
#  define BSAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
# endif
/*
 * AES-NI section
 */
# define AESNI_CAPABLE  (OPENSSL_ia32cap_P[1]&(1<<(57-32)))

int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);
int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);

void aesni_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void aesni_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);

void aesni_ecb_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length, const AES_KEY *key, int enc);
void aesni_cbc_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key, unsigned char *ivec, int enc);

void aesni_ctr32_encrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key, const unsigned char *ivec);

void aesni_xts_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key1, const AES_KEY *key2,
                       const unsigned char iv[16]);

void aesni_xts_decrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key1, const AES_KEY *key2,
                       const unsigned char iv[16]);

void aesni_ccm64_encrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key,
                                const unsigned char ivec[16],
                                unsigned char cmac[16]);

void aesni_ccm64_decrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key,
                                const unsigned char ivec[16],
                                unsigned char cmac[16]);

# if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
size_t aesni_gcm_encrypt(const unsigned char *in,
                         unsigned char *out,
                         size_t len,
                         const void *key, unsigned char ivec[16], u64 *Xi);
#  define AES_gcm_encrypt aesni_gcm_encrypt
size_t aesni_gcm_decrypt(const unsigned char *in,
                         unsigned char *out,
                         size_t len,
                         const void *key, unsigned char ivec[16], u64 *Xi);
#  define AES_gcm_decrypt aesni_gcm_decrypt
void gcm_ghash_avx(u64 Xi[2], const u128 Htable[16], const u8 *in,
                   size_t len);
#  define AES_GCM_ASM(gctx)  (gctx->ctr==aesni_ctr32_encrypt_blocks && \
                              gctx->gcm.ghash==gcm_ghash_avx)
#  define AES_GCM_ASM2(gctx) (gctx->gcm.block==(block128_f)aesni_encrypt && \
                              gctx->gcm.ghash==gcm_ghash_avx)
#  undef AES_GCM_ASM2          /* minor size optimization */
# endif

static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                          const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        ret = aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) aesni_decrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) aesni_cbc_encrypt : NULL;
    } else {
        ret = aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) aesni_encrypt;
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
        else if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        else
            dat->stream.cbc = NULL;
    }

    if (ret < 0) {
        EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}
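
/*
 * Sketch of how this init routine is reached (illustrative and compiled
 * out; the EVP calls are real, the helper name and buffers are invented).
 * Applications never call aesni_init_key() directly: it is installed as
 * the init handler of the EVP_CIPHER returned by EVP_aes_128_cbc() and
 * friends, see the BLOCK_CIPHER_generic macros below.
 */
#if 0
static int example_cbc_encrypt(const unsigned char key[16],
                               const unsigned char iv[16],
                               const unsigned char *pt, int ptlen,
                               unsigned char *ct)
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int outl, tmpl, ok;

    if (c == NULL)
        return 0;
    /* Dispatches to aesni_init_key() when AESNI_CAPABLE is set. */
    ok = EVP_EncryptInit_ex(c, EVP_aes_128_cbc(), NULL, key, iv)
         && EVP_EncryptUpdate(c, ct, &outl, pt, ptlen)
         && EVP_EncryptFinal_ex(c, ct + outl, &tmpl);
    EVP_CIPHER_CTX_free(c);
    return ok;
}
#endif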

static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
                      EVP_CIPHER_CTX_iv_noconst(ctx),
                      EVP_CIPHER_CTX_encrypting(ctx));

    return 1;
}

static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    size_t bl = EVP_CIPHER_CTX_block_size(ctx);

    if (len < bl)
        return 1;

    aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
                      EVP_CIPHER_CTX_encrypting(ctx));

    return 1;
}

# define aesni_ofb_cipher aes_ofb_cipher
static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb_cipher aes_cfb_cipher
static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb8_cipher aes_cfb8_cipher
static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_cfb1_cipher aes_cfb1_cipher
static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_ctr_cipher aes_ctr_cipher
static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                              &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
        gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

# define aesni_gcm_cipher aes_gcm_cipher
static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
    if (!iv && !key)
        return 1;

    if (key) {
        /* key_len is two AES keys */
        if (enc) {
            aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                  &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_encrypt;
            xctx->stream = aesni_xts_encrypt;
        } else {
            aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                  &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_decrypt;
            xctx->stream = aesni_xts_decrypt;
        }

        aesni_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
                              EVP_CIPHER_CTX_key_length(ctx) * 4,
                              &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aesni_encrypt;

        xctx->xts.key1 = &xctx->ks1;
    }

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
    }

    return 1;
}
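
/*
 * Key-split arithmetic, spelled out (illustrative): the EVP-level key
 * length covers both XTS keys, so each half is key_len / 2 bytes, i.e.
 * key_len * 4 bits. For EVP_aes_256_xts() the EVP key is 64 bytes: bytes
 * 0..31 become the 256-bit data key (ks1), bytes 32..63 the 256-bit
 * tweak key (ks2).
 */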

# define aesni_xts_cipher aes_xts_cipher
static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                              &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aesni_encrypt);
        cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
            (ccm128_f) aesni_ccm64_decrypt_blocks;
        cctx->key_set = 1;
    }
    if (iv) {
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}
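
/*
 * Why 15 - L bytes (per RFC 3610): a CCM counter block is 16 bytes, one
 * byte holds the flags and the trailing L bytes carry the message
 * length, leaving 15 - L bytes of nonce. E.g. L = 8 gives a 7-byte
 * nonce, while TLS-style CCM uses L = 3 for a 12-byte nonce.
 */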

# define aesni_ccm_cipher aes_ccm_cipher
static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# ifndef OPENSSL_NO_OCB
void aesni_ocb_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
void aesni_ocb_decrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);

static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
            aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &octx->ksenc.ks);
            aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) aesni_encrypt,
                                    (block128_f) aesni_decrypt,
                                    enc ? aesni_ocb_encrypt
                                        : aesni_ocb_decrypt))
                return 0;
        }
        while (0);

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}

# define aesni_ocb_cipher aes_ocb_cipher
static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
# endif                         /* OPENSSL_NO_OCB */

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aesni_init_key,                 \
        aesni_##mode##_cipher,          \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,     \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_init_key,                   \
        aes_##mode##_cipher,            \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aesni_##mode##_init_key,        \
        aesni_##mode##_cipher,          \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_##mode##_init_key,          \
        aes_##mode##_cipher,            \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
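
/*
 * Minimal AEAD sketch against the EVP_CIPHER objects these macros emit
 * (illustrative and compiled out; the EVP calls are real, the helper
 * name and buffers are invented). AAD is fed through EVP_EncryptUpdate()
 * with a NULL output pointer; the tag is fetched with
 * EVP_CTRL_AEAD_GET_TAG after finalisation.
 */
#if 0
static int example_gcm_seal(const unsigned char key[32],
                            const unsigned char iv[12],
                            const unsigned char *aad, int aadlen,
                            const unsigned char *pt, int ptlen,
                            unsigned char *ct, unsigned char tag[16])
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int outl, tmpl, ok;

    if (c == NULL)
        return 0;
    ok = EVP_EncryptInit_ex(c, EVP_aes_256_gcm(), NULL, key, iv)
         && EVP_EncryptUpdate(c, NULL, &outl, aad, aadlen)
         && EVP_EncryptUpdate(c, ct, &outl, pt, ptlen)
         && EVP_EncryptFinal_ex(c, ct + outl, &tmpl)
         && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
    EVP_CIPHER_CTX_free(c);
    return ok;
}
#endif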

#elif defined(AES_ASM) && (defined(__sparc) || defined(__sparc__))

# include "sparc_arch.h"

extern unsigned int OPENSSL_sparcv9cap_P[];

/*
 * Initial Fujitsu SPARC64 X support
 */
# define HWAES_CAPABLE          (OPENSSL_sparcv9cap_P[0] & SPARCV9_FJAESX)
# define HWAES_set_encrypt_key aes_fx_set_encrypt_key
# define HWAES_set_decrypt_key aes_fx_set_decrypt_key
# define HWAES_encrypt aes_fx_encrypt
# define HWAES_decrypt aes_fx_decrypt
# define HWAES_cbc_encrypt aes_fx_cbc_encrypt
# define HWAES_ctr32_encrypt_blocks aes_fx_ctr32_encrypt_blocks

# define SPARC_AES_CAPABLE      (OPENSSL_sparcv9cap_P[1] & CFR_AES)

void aes_t4_set_encrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
void aes_t4_set_decrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
void aes_t4_encrypt(const unsigned char *in, unsigned char *out,
                    const AES_KEY *key);
void aes_t4_decrypt(const unsigned char *in, unsigned char *out,
                    const AES_KEY *key);
/*
 * Key-length-specific subroutines were chosen for the following reason.
 * Each SPARC T4 core can execute up to 8 threads which share the core's
 * resources. Loading as much key material as possible into registers
 * minimizes references to the shared memory interface, as well as the
 * number of instructions in inner loops [much needed on T4]. But then
 * non-key-length-specific routines would require conditional branches
 * either in inner loops or on subroutine entry. The former is hardly
 * acceptable, while the latter would grow the code to roughly the size
 * occupied by multiple key-length-specific subroutines anyway, so why
 * fight?
 */
void aes128_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes128_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes192_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes192_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes256_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes256_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes128_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes192_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes256_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes128_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes128_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);

static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                           const unsigned char *iv, int enc)
{
    int ret, mode, bits;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        ret = 0;
        aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_decrypt;
        switch (bits) {
        case 128:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes128_t4_cbc_decrypt : NULL;
            break;
        case 192:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes192_t4_cbc_decrypt : NULL;
            break;
        case 256:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes256_t4_cbc_decrypt : NULL;
            break;
        default:
            ret = -1;
        }
    } else {
        ret = 0;
        aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_encrypt;
        switch (bits) {
        case 128:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        case 192:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        case 256:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        default:
            ret = -1;
        }
    }

    if (ret < 0) {
        EVPerr(EVP_F_AES_T4_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}

# define aes_t4_cbc_cipher aes_cbc_cipher
static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_ecb_cipher aes_ecb_cipher
static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_ofb_cipher aes_ofb_cipher
static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_cfb_cipher aes_cfb_cipher
static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_cfb8_cipher aes_cfb8_cipher
static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

# define aes_t4_cfb1_cipher aes_cfb1_cipher
static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

# define aes_t4_ctr_cipher aes_ctr_cipher
static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
        aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                           (block128_f) aes_t4_encrypt);
        switch (bits) {
        case 128:
            gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            break;
        case 192:
            gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            break;
        case 256:
            gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
            break;
        default:
            return 0;
        }
        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

# define aes_t4_gcm_cipher aes_gcm_cipher
static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
    if (!iv && !key)
        return 1;

    if (key) {
        int bits = EVP_CIPHER_CTX_key_length(ctx) * 4;
        xctx->stream = NULL;
        /* key_len is two AES keys */
        if (enc) {
            aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_encrypt;
            switch (bits) {
            case 128:
                xctx->stream = aes128_t4_xts_encrypt;
                break;
            case 256:
                xctx->stream = aes256_t4_xts_encrypt;
                break;
            default:
                return 0;
            }
        } else {
            aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                   &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_decrypt;
            switch (bits) {
            case 128:
                xctx->stream = aes128_t4_xts_decrypt;
                break;
            case 256:
                xctx->stream = aes256_t4_xts_decrypt;
                break;
            default:
                return 0;
            }
        }

        aes_t4_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
                               EVP_CIPHER_CTX_key_length(ctx) * 4,
                               &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aes_t4_encrypt;

        xctx->xts.key1 = &xctx->ks1;
    }

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
    }

    return 1;
}

# define aes_t4_xts_cipher aes_xts_cipher
static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
        aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aes_t4_encrypt);
        cctx->str = NULL;
        cctx->key_set = 1;
    }
    if (iv) {
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}

# define aes_t4_ccm_cipher aes_ccm_cipher
static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# ifndef OPENSSL_NO_OCB
static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
            aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                   &octx->ksenc.ks);
            aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                   &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) aes_t4_encrypt,
                                    (block128_f) aes_t4_decrypt,
                                    NULL))
                return 0;
        }
        while (0);

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}

# define aes_t4_ocb_cipher aes_ocb_cipher
static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# endif                         /* OPENSSL_NO_OCB */

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_t4_init_key,                \
        aes_t4_##mode##_cipher,         \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,     \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_init_key,                   \
        aes_##mode##_cipher,            \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_t4_##mode##_init_key,       \
        aes_t4_##mode##_cipher,         \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_##mode##_init_key,          \
        aes_##mode##_cipher,            \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }

#elif defined(OPENSSL_CPUID_OBJ) && defined(__s390__)
/*
 * IBM S390X support
 */
# include "s390x_arch.h"

typedef struct {
    union {
        double align;
        /*-
         * KMA-GCM-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-11)
         */
        struct {
            unsigned char reserved[12];
            union {
                unsigned int w;
                unsigned char b[4];
            } cv;
            union {
                unsigned long long g[2];
                unsigned char b[16];
            } t;
            unsigned char h[16];
            unsigned long long taadl;
            unsigned long long tpcl;
            union {
                unsigned long long g[2];
                unsigned int w[4];
            } j0;
            unsigned char k[32];
        } param;
        /* KMA-GCM-AES parameter block - end */
    } kma;
    unsigned int fc;
    int key_set;

    unsigned char *iv;
    int ivlen;
    int iv_set;
    int iv_gen;

    int taglen;

    unsigned char ares[16];
    unsigned char mres[16];
    unsigned char kres[16];
    int areslen;
    int mreslen;
    int kreslen;

    int tls_aad_len;
} S390X_AES_GCM_CTX;

typedef struct {
    union {
        double align;
        /*-
         * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
         * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
         * rounds field is used to store the function code and that the key
         * schedule is not stored (if aes hardware support is detected).
         */
        struct {
            unsigned char pad[16];
            AES_KEY k;
        } key;

        struct {
            /*-
             * KMAC-AES parameter block - begin
             * (see z/Architecture Principles of Operation >= SA22-7832-08)
             */
            struct {
                union {
                    unsigned long long g[2];
                    unsigned char b[16];
                } icv;
                unsigned char k[32];
            } kmac_param;
            /* KMAC-AES parameter block - end */

            union {
                unsigned long long g[2];
                unsigned char b[16];
            } nonce;
            union {
                unsigned long long g[2];
                unsigned char b[16];
            } buf;

            unsigned long long blocks;
            int l;
            int m;
            int tls_aad_len;
            int iv_set;
            int tag_set;
            int len_set;
            int key_set;

            unsigned char pad[140];
            unsigned int fc;
        } ccm;
    } aes;
} S390X_AES_CCM_CTX;

# define S390X_aes_128_CAPABLE ((OPENSSL_s390xcap_P.km[0] &     \
                                 S390X_CAPBIT(S390X_AES_128)) &&\
                                (OPENSSL_s390xcap_P.kmc[0] &    \
                                 S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_CAPABLE ((OPENSSL_s390xcap_P.km[0] &     \
                                 S390X_CAPBIT(S390X_AES_192)) &&\
                                (OPENSSL_s390xcap_P.kmc[0] &    \
                                 S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_CAPABLE ((OPENSSL_s390xcap_P.km[0] &     \
                                 S390X_CAPBIT(S390X_AES_256)) &&\
                                (OPENSSL_s390xcap_P.kmc[0] &    \
                                 S390X_CAPBIT(S390X_AES_256)))

# define s390x_aes_init_key aes_init_key
static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc);

# define S390X_aes_128_cbc_CAPABLE      1       /* checked by callee */
# define S390X_aes_192_cbc_CAPABLE      1
# define S390X_aes_256_cbc_CAPABLE      1

# define s390x_aes_cbc_cipher aes_cbc_cipher
static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);

# define S390X_aes_128_ecb_CAPABLE      0
# define S390X_aes_192_ecb_CAPABLE      0
# define S390X_aes_256_ecb_CAPABLE      0

# define s390x_aes_ecb_cipher aes_ecb_cipher
static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);

# define S390X_aes_128_ofb_CAPABLE      0
# define S390X_aes_192_ofb_CAPABLE      0
# define S390X_aes_256_ofb_CAPABLE      0

# define s390x_aes_ofb_cipher aes_ofb_cipher
static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);

# define S390X_aes_128_cfb_CAPABLE      0
# define S390X_aes_192_cfb_CAPABLE      0
# define S390X_aes_256_cfb_CAPABLE      0

# define s390x_aes_cfb_cipher aes_cfb_cipher
static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);

# define S390X_aes_128_cfb8_CAPABLE     0
# define S390X_aes_192_cfb8_CAPABLE     0
# define S390X_aes_256_cfb8_CAPABLE     0

# define s390x_aes_cfb8_cipher aes_cfb8_cipher
static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len);

# define S390X_aes_128_cfb1_CAPABLE     0
# define S390X_aes_192_cfb1_CAPABLE     0
# define S390X_aes_256_cfb1_CAPABLE     0

# define s390x_aes_cfb1_cipher aes_cfb1_cipher
static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len);

# define S390X_aes_128_ctr_CAPABLE      1       /* checked by callee */
# define S390X_aes_192_ctr_CAPABLE      1
# define S390X_aes_256_ctr_CAPABLE      1

# define s390x_aes_ctr_cipher aes_ctr_cipher
static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);

# define S390X_aes_128_gcm_CAPABLE (S390X_aes_128_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kma[0] &        \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_gcm_CAPABLE (S390X_aes_192_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kma[0] &        \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_gcm_CAPABLE (S390X_aes_256_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kma[0] &        \
                                     S390X_CAPBIT(S390X_AES_256)))
/* iv + padding length for iv lengths != 12 */
# define S390X_gcm_ivpadlen(i)  ((((i) + 15) >> 4 << 4) + 16)
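
/*
 * Worked example (illustrative): for a 16-byte iv,
 * S390X_gcm_ivpadlen(16) = ((16 + 15) >> 4 << 4) + 16 = 16 + 16 = 32,
 * i.e. the iv rounded up to a whole 16-byte block plus one extra block
 * holding the 64-bit bit-length field that the GHASH-based J0 derivation
 * requires for non-96-bit ivs.
 */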

/*-
 * Process additional authenticated data. Returns 0 on success. Code is
 * big-endian.
 */
static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
                             size_t len)
{
    unsigned long long alen;
    int n, rem;

    if (ctx->kma.param.tpcl)
        return -2;

    alen = ctx->kma.param.taadl + len;
    if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
        return -1;
    ctx->kma.param.taadl = alen;

    n = ctx->areslen;
    if (n) {
        while (n && len) {
            ctx->ares[n] = *aad;
            n = (n + 1) & 0xf;
            ++aad;
            --len;
        }
        /* ctx->ares contains a complete block if offset has wrapped around */
        if (!n) {
            s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
            ctx->fc |= S390X_KMA_HS;
        }
        ctx->areslen = n;
    }

    rem = len & 0xf;

    len &= ~0xf;
    if (len) {
        s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
        aad += len;
        ctx->fc |= S390X_KMA_HS;
    }

    if (rem) {
        ctx->areslen = rem;

        do {
            --rem;
            ctx->ares[rem] = aad[rem];
        } while (rem);
    }
    return 0;
}

/*-
 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
 * success. Code is big-endian.
 */
static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
                         unsigned char *out, size_t len)
{
    const unsigned char *inptr;
    unsigned long long mlen;
    union {
        unsigned int w[4];
        unsigned char b[16];
    } buf;
    size_t inlen;
    int n, rem, i;

    mlen = ctx->kma.param.tpcl + len;
    if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
        return -1;
    ctx->kma.param.tpcl = mlen;

    n = ctx->mreslen;
    if (n) {
        inptr = in;
        inlen = len;
        while (n && inlen) {
            ctx->mres[n] = *inptr;
            n = (n + 1) & 0xf;
            ++inptr;
            --inlen;
        }
        /* ctx->mres contains a complete block if offset has wrapped around */
        if (!n) {
            s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
                      ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
            ctx->fc |= S390X_KMA_HS;
            ctx->areslen = 0;

            /* previous call already encrypted/decrypted its remainder,
             * see comment below */
            n = ctx->mreslen;
            while (n) {
                *out = buf.b[n];
                n = (n + 1) & 0xf;
                ++out;
                ++in;
                --len;
            }
            ctx->mreslen = 0;
        }
    }

    rem = len & 0xf;

    len &= ~0xf;
    if (len) {
        s390x_kma(ctx->ares, ctx->areslen, in, len, out,
                  ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
        in += len;
        out += len;
        ctx->fc |= S390X_KMA_HS;
        ctx->areslen = 0;
    }

    /*-
     * If there is a remainder, it has to be saved such that it can be
     * processed by kma later. However, we also have to do the for-now
     * unauthenticated encryption/decryption part here and now...
     */
    if (rem) {
        if (!ctx->mreslen) {
            buf.w[0] = ctx->kma.param.j0.w[0];
            buf.w[1] = ctx->kma.param.j0.w[1];
            buf.w[2] = ctx->kma.param.j0.w[2];
            buf.w[3] = ctx->kma.param.cv.w + 1;
            s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);
        }

        n = ctx->mreslen;
        for (i = 0; i < rem; i++) {
            ctx->mres[n + i] = in[i];
            out[i] = in[i] ^ ctx->kres[n + i];
        }

        ctx->mreslen += rem;
    }
    return 0;
}

/*-
 * Initialize context structure. Code is big-endian.
 */
static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx,
                                const unsigned char *iv)
{
    ctx->kma.param.t.g[0] = 0;
    ctx->kma.param.t.g[1] = 0;
    ctx->kma.param.tpcl = 0;
    ctx->kma.param.taadl = 0;
    ctx->mreslen = 0;
    ctx->areslen = 0;
    ctx->kreslen = 0;

    if (ctx->ivlen == 12) {
        memcpy(&ctx->kma.param.j0, iv, ctx->ivlen);
        ctx->kma.param.j0.w[3] = 1;
        ctx->kma.param.cv.w = 1;
    } else {
        /* ctx->iv has the right size and is already padded. */
        memcpy(ctx->iv, iv, ctx->ivlen);
        s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
                  ctx->fc, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;

        ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
        ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
        ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
        ctx->kma.param.t.g[0] = 0;
        ctx->kma.param.t.g[1] = 0;
    }
}
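
/*
 * GCM J0 recap (per NIST SP 800-38D, illustrative): for a 96-bit iv,
 * J0 = iv || 0x00000001, which is what the 12-byte branch above sets
 * directly; for any other length, J0 = GHASH(iv zero-padded to a block
 * || 64-bit zero || 64-bit iv bit-length), which is what the kma pass
 * over the padded iv buffer computes.
 */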

/*-
 * Performs various operations on the context structure depending on control
 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
 * Code is big-endian.
 */
static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
    S390X_AES_GCM_CTX *gctx_out;
    EVP_CIPHER_CTX *out;
    unsigned char *buf, *iv;
    int ivlen, enc, len;

    switch (type) {
    case EVP_CTRL_INIT:
        ivlen = EVP_CIPHER_CTX_iv_length(c);
        iv = EVP_CIPHER_CTX_iv_noconst(c);
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = ivlen;
        gctx->iv = iv;
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        if (arg <= 0)
            return 0;

        if (arg != 12) {
            iv = EVP_CIPHER_CTX_iv_noconst(c);
            len = S390X_gcm_ivpadlen(arg);

            /* Allocate memory for iv if needed. */
            if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
                if (gctx->iv != iv)
                    OPENSSL_free(gctx->iv);

                gctx->iv = OPENSSL_malloc(len);
                if (gctx->iv == NULL)
                    return 0;
            }
            /* Add padding. */
            memset(gctx->iv + arg, 0, len - arg - 8);
            *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        buf = EVP_CIPHER_CTX_buf_noconst(c);
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (arg <= 0 || arg > 16 || enc)
            return 0;

        memcpy(buf, ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
            return 0;

        memcpy(ptr, gctx->kma.param.t.b, arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole iv */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;

        if (arg)
            memcpy(gctx->iv, ptr, arg);

        enc = EVP_CIPHER_CTX_encrypting(c);
        if (enc) {
            if (c->drbg != NULL) {
                if (RAND_DRBG_bytes(c->drbg, gctx->iv + arg, gctx->ivlen - arg) == 0)
                    return 0;
            } else if (RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0) {
                return 0;
            }
        }

        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;

        s390x_aes_gcm_setiv(gctx, gctx->iv);

        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;

        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        (*(unsigned long long *)(gctx->iv + gctx->ivlen - 8))++;
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
            return 0;

        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        s390x_aes_gcm_setiv(gctx, gctx->iv);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the aad for later use. */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;

        buf = EVP_CIPHER_CTX_buf_noconst(c);
        memcpy(buf, ptr, arg);
        gctx->tls_aad_len = arg;

        len = buf[arg - 2] << 8 | buf[arg - 1];
        /* Correct length for explicit iv. */
        if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
            return 0;
        len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;

        /* If decrypting correct for tag too. */
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (!enc) {
            if (len < EVP_GCM_TLS_TAG_LEN)
                return 0;
            len -= EVP_GCM_TLS_TAG_LEN;
        }
        buf[arg - 2] = len >> 8;
        buf[arg - 1] = len & 0xff;
        /* Extra padding: tag appended to record. */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        out = ptr;
        gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);
        iv = EVP_CIPHER_CTX_iv_noconst(c);

        if (gctx->iv == iv) {
            gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
        } else {
            len = S390X_gcm_ivpadlen(gctx->ivlen);

            gctx_out->iv = OPENSSL_malloc(len);
            if (gctx_out->iv == NULL)
                return 0;

            memcpy(gctx_out->iv, gctx->iv, len);
        }
        return 1;

    default:
        return -1;
    }
}
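
/*
 * Using the ctrl interface from the EVP layer (illustrative and compiled
 * out; the ctrl constants and EVP calls are real, the helper name is
 * invented). A non-default iv length must be set after the cipher is
 * selected but before key and iv are supplied, which exercises the
 * EVP_CTRL_AEAD_SET_IVLEN path handled above.
 */
#if 0
static int example_gcm_ivlen16(EVP_CIPHER_CTX *c, const unsigned char *key,
                               const unsigned char iv[16])
{
    return EVP_EncryptInit_ex(c, EVP_aes_128_gcm(), NULL, NULL, NULL)
           && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 16, NULL)
           && EVP_EncryptInit_ex(c, NULL, NULL, key, iv);
}
#endif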

/*-
 * Set key and/or iv. Returns 1 on success. Otherwise 0 is returned.
 */
static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    int keylen;

    if (iv == NULL && key == NULL)
        return 1;

    if (key != NULL) {
        keylen = EVP_CIPHER_CTX_key_length(ctx);
        memcpy(&gctx->kma.param.k, key, keylen);

        /* Convert key size to function code. */
        gctx->fc = S390X_AES_128 + (((keylen << 3) - 128) >> 6);
        if (!enc)
            gctx->fc |= S390X_DECRYPT;

        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;

        if (iv != NULL) {
            s390x_aes_gcm_setiv(gctx, iv);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        if (gctx->key_set)
            s390x_aes_gcm_setiv(gctx, iv);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);

        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

/*-
 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
 * if successful. Otherwise -1 is returned. Code is big-endian.
 */
static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                    const unsigned char *in, size_t len)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);
    int rv = -1;

    if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;

    if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN
                                     : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;

    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;

    gctx->kma.param.taadl = gctx->tls_aad_len << 3;
    gctx->kma.param.tpcl = len << 3;
    s390x_kma(buf, gctx->tls_aad_len, in, len, out,
              gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);

    if (enc) {
        memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
                          EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }
err:
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}
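
/*
 * Record layout handled above (TLS 1.2 AES-GCM, illustrative
 * arithmetic): | explicit iv (8) | ciphertext (len) | tag (16) |. A
 * 100-byte record thus carries 100 - 8 - 16 = 76 bytes of plaintext,
 * and the encrypt path returns 76 + 8 + 16 = 100.
 */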

/*-
 * Called from EVP layer to initialize context, process additional
 * authenticated data, en/de-crypt plain/cipher-text and authenticate
 * ciphertext or process a TLS packet, depending on context. Returns bytes
 * written on success. Otherwise -1 is returned. Code is big-endian.
 */
static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    unsigned char *buf, tmp[16];
    int enc;

    if (!gctx->key_set)
        return -1;

    if (gctx->tls_aad_len >= 0)
        return s390x_aes_gcm_tls_cipher(ctx, out, in, len);

    if (!gctx->iv_set)
        return -1;

    if (in != NULL) {
        if (out == NULL) {
            if (s390x_aes_gcm_aad(gctx, in, len))
                return -1;
        } else {
            if (s390x_aes_gcm(gctx, in, out, len))
                return -1;
        }
        return len;
    } else {
        gctx->kma.param.taadl <<= 3;
        gctx->kma.param.tpcl <<= 3;
        s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
                  gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
        /* recall that we already en-/decrypted gctx->mres
         * and returned it to the caller... */
        OPENSSL_cleanse(tmp, gctx->mreslen);
        gctx->iv_set = 0;

        enc = EVP_CIPHER_CTX_encrypting(ctx);
        if (enc) {
            gctx->taglen = 16;
        } else {
            if (gctx->taglen < 0)
                return -1;

            buf = EVP_CIPHER_CTX_buf_noconst(ctx);
            if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
                return -1;
        }
        return 0;
    }
}

static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
    const unsigned char *iv;

    if (gctx == NULL)
        return 0;

    iv = EVP_CIPHER_CTX_iv(c);
    if (iv != gctx->iv)
        OPENSSL_free(gctx->iv);

    OPENSSL_cleanse(gctx, sizeof(*gctx));
    return 1;
}

# define S390X_AES_XTS_CTX              EVP_AES_XTS_CTX
# define S390X_aes_128_xts_CAPABLE      1       /* checked by callee */
# define S390X_aes_256_xts_CAPABLE      1

# define s390x_aes_xts_init_key aes_xts_init_key
static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc);
# define s390x_aes_xts_cipher aes_xts_cipher
static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
# define s390x_aes_xts_ctrl aes_xts_ctrl
static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
# define s390x_aes_xts_cleanup aes_xts_cleanup

# define S390X_aes_128_ccm_CAPABLE (S390X_aes_128_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmac[0] &       \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_ccm_CAPABLE (S390X_aes_192_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmac[0] &       \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_ccm_CAPABLE (S390X_aes_256_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmac[0] &       \
                                     S390X_CAPBIT(S390X_AES_256)))

# define S390X_CCM_AAD_FLAG     0x40

/*-
 * Set nonce and length fields. Code is big-endian.
 */
static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
                                       const unsigned char *nonce,
                                       size_t mlen)
{
    ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
    ctx->aes.ccm.nonce.g[1] = mlen;
    memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);
}

/*-
 * Process additional authenticated data. Code is big-endian.
 */
static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,
                              size_t alen)
{
    unsigned char *ptr;
    int i, rem;

    if (!alen)
        return;

    ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;

    /* Suppress 'type-punned pointer dereference' warning. */
    ptr = ctx->aes.ccm.buf.b;

    if (alen < ((1 << 16) - (1 << 8))) {
        *(uint16_t *)ptr = alen;
        i = 2;
    } else if (sizeof(alen) == 8
               && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) {
        *(uint16_t *)ptr = 0xffff;
        *(uint64_t *)(ptr + 2) = alen;
        i = 10;
    } else {
        *(uint16_t *)ptr = 0xfffe;
        *(uint32_t *)(ptr + 2) = alen;
        i = 6;
    }

    while (i < 16 && alen) {
        ctx->aes.ccm.buf.b[i] = *aad;
        ++aad;
        --alen;
        ++i;
    }
    while (i < 16) {
        ctx->aes.ccm.buf.b[i] = 0;
        ++i;
    }

    ctx->aes.ccm.kmac_param.icv.g[0] = 0;
    ctx->aes.ccm.kmac_param.icv.g[1] = 0;
    s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc,
               &ctx->aes.ccm.kmac_param);
    ctx->aes.ccm.blocks += 2;

    rem = alen & 0xf;
    alen &= ~0xf;
    if (alen) {
        s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
        ctx->aes.ccm.blocks += alen >> 4;
        aad += alen;
    }
    if (rem) {
        for (i = 0; i < rem; i++)
            ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i];

        s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                 ctx->aes.ccm.kmac_param.k);
        ctx->aes.ccm.blocks++;
    }
}
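
/*
 * AAD length encoding above follows RFC 3610, section 2.2 (worked
 * examples, illustrative): alen = 10 is encoded as the 2-byte value
 * 0x000a; alen = 70000 (>= 2^16 - 2^8) as 0xfffe followed by a 4-byte
 * length; alen >= 2^32 as 0xffff followed by an 8-byte length. The
 * encoded length and the first AAD bytes share one 16-byte block,
 * zero-padded.
 */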
1762
1763 /*-
1764 * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for
1765 * success.
1766 */
1767 static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in,
1768 unsigned char *out, size_t len, int enc)
1769 {
1770 size_t n, rem;
1771 unsigned int i, l, num;
1772 unsigned char flags;
1773
1774 flags = ctx->aes.ccm.nonce.b[0];
1775 if (!(flags & S390X_CCM_AAD_FLAG)) {
1776 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b,
1777 ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k);
1778 ctx->aes.ccm.blocks++;
1779 }
1780 l = flags & 0x7;
1781 ctx->aes.ccm.nonce.b[0] = l;
1782
1783 /*-
1784 * Reconstruct length from encoded length field
1785 * and initialize it with counter value.
1786 */
1787 n = 0;
1788 for (i = 15 - l; i < 15; i++) {
1789 n |= ctx->aes.ccm.nonce.b[i];
1790 ctx->aes.ccm.nonce.b[i] = 0;
1791 n <<= 8;
1792 }
1793 n |= ctx->aes.ccm.nonce.b[15];
1794 ctx->aes.ccm.nonce.b[15] = 1;
1795
1796 if (n != len)
1797 return -1; /* length mismatch */
1798
1799 if (enc) {
1800 /* Two operations per block plus one for tag encryption */
1801 ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1;
1802 if (ctx->aes.ccm.blocks > (1ULL << 61))
1803 return -2; /* too much data */
1804 }
1805
1806 num = 0;
1807 rem = len & 0xf;
1808 len &= ~0xf;
1809
1810 if (enc) {
1811 /* mac-then-encrypt */
1812 if (len)
1813 s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1814 if (rem) {
1815 for (i = 0; i < rem; i++)
1816 ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i];
1817
1818 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1819 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1820 ctx->aes.ccm.kmac_param.k);
1821 }
1822
1823 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
1824 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
1825 &num, (ctr128_f)AES_ctr32_encrypt);
1826 } else {
1827 /* decrypt-then-mac */
1828 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
1829 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
1830 &num, (ctr128_f)AES_ctr32_encrypt);
1831
1832 if (len)
1833 s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1834 if (rem) {
1835 for (i = 0; i < rem; i++)
1836 ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i];
1837
1838 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1839 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1840 ctx->aes.ccm.kmac_param.k);
1841 }
1842 }
1843 /* encrypt tag */
1844 for (i = 15 - l; i < 16; i++)
1845 ctx->aes.ccm.nonce.b[i] = 0;
1846
1847 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc,
1848 ctx->aes.ccm.kmac_param.k);
1849 ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0];
1850 ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1];
1851
1852 ctx->aes.ccm.nonce.b[0] = flags; /* restore flags field */
1853 return 0;
1854 }
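
/*-
 * For reference, the flags/nonce handling above follows the RFC 3610
 * block layout. A sketch of how the B0 flags byte and the initial
 * counter block A0 are formed (illustrative only):
 *
 *     B0: flags || nonce (15 - L bytes) || l(m) (L bytes, big-endian)
 *         flags = 64 * (l(a) > 0) + 8 * ((M - 2) / 2) + (L - 1)
 *     A0: L - 1 || nonce (15 - L bytes) || counter (L bytes) = 0...01
 *
 * which is why the code keeps only the low 3 flag bits (L - 1), clears
 * the length field and stores 1 in the last byte before CTR encryption.
 */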
1855
1856 /*-
1857 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1858 * if successful. Otherwise -1 is returned.
1859 */
1860 static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1861 const unsigned char *in, size_t len)
1862 {
1863 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
1864 unsigned char *ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
1865 unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1866 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
1867
1868 if (out != in
1869 || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m))
1870 return -1;
1871
1872 if (enc) {
1873 /* Set explicit iv (sequence number). */
1874 memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN);
1875 }
1876
1877 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
1878 /*-
1879 * Get explicit iv (sequence number). We already have fixed iv
1880 * (server/client_write_iv) here.
1881 */
1882 memcpy(ivec + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN);
1883 s390x_aes_ccm_setiv(cctx, ivec, len);
1884
1885 /* Process aad (sequence number|type|version|length) */
1886 s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len);
1887
1888 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
1889 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
1890
1891 if (enc) {
1892 if (s390x_aes_ccm(cctx, in, out, len, enc))
1893 return -1;
1894
1895 memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
1896 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
1897 } else {
1898 if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
1899 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len,
1900 cctx->aes.ccm.m))
1901 return len;
1902 }
1903
1904 OPENSSL_cleanse(out, len);
1905 return -1;
1906 }
1907 }
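
/*-
 * Illustrative driver for the TLS path above (sketch, not library code):
 * the caller first hands over the 13-byte TLS AAD (8-byte sequence
 * number, 1-byte type, 2-byte version, 2-byte length) and then processes
 * the whole record in place:
 *
 *     unsigned char aad[EVP_AEAD_TLS1_AAD_LEN];  // seq|type|version|len
 *     ... fill aad ...
 *     EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_TLS1_AAD,
 *                         EVP_AEAD_TLS1_AAD_LEN, aad);
 *     EVP_Cipher(ctx, rec, rec, reclen);         // in == out is required
 */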
1908
1909 /*-
1910 * Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is
1911 * returned.
1912 */
1913 static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
1914 const unsigned char *key,
1915 const unsigned char *iv, int enc)
1916 {
1917 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
1918 unsigned char *ivec;
1919 int keylen;
1920
1921 if (iv == NULL && key == NULL)
1922 return 1;
1923
1924 if (key != NULL) {
1925 keylen = EVP_CIPHER_CTX_key_length(ctx);
1926 /* Convert key size to function code. */
1927 cctx->aes.ccm.fc = S390X_AES_128 + (((keylen << 3) - 128) >> 6);
1928 memcpy(cctx->aes.ccm.kmac_param.k, key, keylen);
1929
1930 /* Store encoded m and l. */
1931 cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7)
1932 | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3;
1933 memset(cctx->aes.ccm.nonce.b + 1, 0,
1934 sizeof(cctx->aes.ccm.nonce.b) - 1);
1935 cctx->aes.ccm.blocks = 0;
1936
1937 cctx->aes.ccm.key_set = 1;
1938 }
1939
1940 if (iv != NULL) {
1941 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
1942 memcpy(ivec, iv, 15 - cctx->aes.ccm.l);
1943
1944 cctx->aes.ccm.iv_set = 1;
1945 }
1946
1947 return 1;
1948 }
1949
1950 /*-
1951 * Called from EVP layer to initialize context, process additional
1952 * authenticated data, en/de-crypt plain/cipher-text and authenticate
1953 * plaintext or process a TLS packet, depending on context. Returns bytes
1954 * written on success. Otherwise -1 is returned.
1955 */
1956 static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1957 const unsigned char *in, size_t len)
1958 {
1959 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
1960 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
1961 int rv;
1962 unsigned char *buf, *ivec;
1963
1964 if (!cctx->aes.ccm.key_set)
1965 return -1;
1966
1967 if (cctx->aes.ccm.tls_aad_len >= 0)
1968 return s390x_aes_ccm_tls_cipher(ctx, out, in, len);
1969
1970 /*-
1971 * Final(): Does not return any data. Recall that ccm is mac-then-encrypt
1972 * so integrity must be checked already at Update(), i.e., before
1973 * potentially corrupted data is output.
1974 */
1975 if (in == NULL && out != NULL)
1976 return 0;
1977
1978 if (!cctx->aes.ccm.iv_set)
1979 return -1;
1980
1981 if (!enc && !cctx->aes.ccm.tag_set)
1982 return -1;
1983
1984 if (out == NULL) {
1985 /* Update(): Pass message length. */
1986 if (in == NULL) {
1987 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
1988 s390x_aes_ccm_setiv(cctx, ivec, len);
1989
1990 cctx->aes.ccm.len_set = 1;
1991 return len;
1992 }
1993
1994 /* Update(): Process aad. */
1995 if (!cctx->aes.ccm.len_set && len)
1996 return -1;
1997
1998 s390x_aes_ccm_aad(cctx, in, len);
1999 return len;
2000 }
2001
2002 /* Update(): Process message. */
2003
2004 if (!cctx->aes.ccm.len_set) {
2005 /*-
2006 * In case the message length was not previously set explicitly via
2007 * Update(), set it now.
2008 */
2009 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
2010 s390x_aes_ccm_setiv(cctx, ivec, len);
2011
2012 cctx->aes.ccm.len_set = 1;
2013 }
2014
2015 if (enc) {
2016 if (s390x_aes_ccm(cctx, in, out, len, enc))
2017 return -1;
2018
2019 cctx->aes.ccm.tag_set = 1;
2020 return len;
2021 } else {
2022 rv = -1;
2023
2024 if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
2025 buf = EVP_CIPHER_CTX_buf_noconst(ctx);
2026 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf,
2027 cctx->aes.ccm.m))
2028 rv = len;
2029 }
2030
2031 if (rv == -1)
2032 OPENSSL_cleanse(out, len);
2033
2034 cctx->aes.ccm.iv_set = 0;
2035 cctx->aes.ccm.tag_set = 0;
2036 cctx->aes.ccm.len_set = 0;
2037 return rv;
2038 }
2039 }
2040
2041 /*-
2042 * Performs various operations on the context structure depending on control
2043 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
2044 * The code assumes a big-endian platform (s390x).
2045 */
2046 static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2047 {
2048 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c);
2049 unsigned char *buf, *iv;
2050 int enc, len;
2051
2052 switch (type) {
2053 case EVP_CTRL_INIT:
2054 cctx->aes.ccm.key_set = 0;
2055 cctx->aes.ccm.iv_set = 0;
2056 cctx->aes.ccm.l = 8;
2057 cctx->aes.ccm.m = 12;
2058 cctx->aes.ccm.tag_set = 0;
2059 cctx->aes.ccm.len_set = 0;
2060 cctx->aes.ccm.tls_aad_len = -1;
2061 return 1;
2062
2063 case EVP_CTRL_AEAD_TLS1_AAD:
2064 if (arg != EVP_AEAD_TLS1_AAD_LEN)
2065 return 0;
2066
2067 /* Save the aad for later use. */
2068 buf = EVP_CIPHER_CTX_buf_noconst(c);
2069 memcpy(buf, ptr, arg);
2070 cctx->aes.ccm.tls_aad_len = arg;
2071
2072 len = *(uint16_t *)(buf + arg - 2);
2073 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
2074 return 0;
2075
2076 /* Correct length for explicit iv. */
2077 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
2078
2079 enc = EVP_CIPHER_CTX_encrypting(c);
2080 if (!enc) {
2081 if (len < cctx->aes.ccm.m)
2082 return 0;
2083
2084 /* Correct length for tag. */
2085 len -= cctx->aes.ccm.m;
2086 }
2087
2088 *(uint16_t *)(buf + arg - 2) = len;
2089 /* Extra padding: tag appended to record. */
2090 return cctx->aes.ccm.m;
2091
2092 case EVP_CTRL_CCM_SET_IV_FIXED:
2093 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
2094 return 0;
2095
2096 /* Copy to first part of the iv. */
2097 iv = EVP_CIPHER_CTX_iv_noconst(c);
2098 memcpy(iv, ptr, arg);
2099 return 1;
2100
2101 case EVP_CTRL_AEAD_SET_IVLEN:
2102 arg = 15 - arg;
2103 /* fall-through */
2104
2105 case EVP_CTRL_CCM_SET_L:
2106 if (arg < 2 || arg > 8)
2107 return 0;
2108
2109 cctx->aes.ccm.l = arg;
2110 return 1;
2111
2112 case EVP_CTRL_AEAD_SET_TAG:
2113 if ((arg & 1) || arg < 4 || arg > 16)
2114 return 0;
2115
2116 enc = EVP_CIPHER_CTX_encrypting(c);
2117 if (enc && ptr)
2118 return 0;
2119
2120 if (ptr) {
2121 cctx->aes.ccm.tag_set = 1;
2122 buf = EVP_CIPHER_CTX_buf_noconst(c);
2123 memcpy(buf, ptr, arg);
2124 }
2125
2126 cctx->aes.ccm.m = arg;
2127 return 1;
2128
2129 case EVP_CTRL_AEAD_GET_TAG:
2130 enc = EVP_CIPHER_CTX_encrypting(c);
2131 if (!enc || !cctx->aes.ccm.tag_set)
2132 return 0;
2133
2134 if (arg < cctx->aes.ccm.m)
2135 return 0;
2136
2137 memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
2138 cctx->aes.ccm.tag_set = 0;
2139 cctx->aes.ccm.iv_set = 0;
2140 cctx->aes.ccm.len_set = 0;
2141 return 1;
2142
2143 case EVP_CTRL_COPY:
2144 return 1;
2145
2146 default:
2147 return -1;
2148 }
2149 }
2150
2151 # define s390x_aes_ccm_cleanup aes_ccm_cleanup
2152
2153 # ifndef OPENSSL_NO_OCB
2154 # define S390X_AES_OCB_CTX EVP_AES_OCB_CTX
2155 # define S390X_aes_128_ocb_CAPABLE 0
2156 # define S390X_aes_192_ocb_CAPABLE 0
2157 # define S390X_aes_256_ocb_CAPABLE 0
2158
2159 # define s390x_aes_ocb_init_key aes_ocb_init_key
2160 static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2161 const unsigned char *iv, int enc);
2162 # define s390x_aes_ocb_cipher aes_ocb_cipher
2163 static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2164 const unsigned char *in, size_t len);
2165 # define s390x_aes_ocb_cleanup aes_ocb_cleanup
2166 static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *);
2167 # define s390x_aes_ocb_ctrl aes_ocb_ctrl
2168 static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
2169 # endif
2170
2171 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode, \
2172 MODE,flags) \
2173 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2174 nid##_##keylen##_##nmode,blocksize, \
2175 keylen / 8, \
2176 ivlen, \
2177 flags | EVP_CIPH_##MODE##_MODE, \
2178 s390x_aes_init_key, \
2179 s390x_aes_##mode##_cipher, \
2180 NULL, \
2181 sizeof(EVP_AES_KEY), \
2182 NULL, \
2183 NULL, \
2184 NULL, \
2185 NULL \
2186 }; \
2187 static const EVP_CIPHER aes_##keylen##_##mode = { \
2188 nid##_##keylen##_##nmode, \
2189 blocksize, \
2190 keylen / 8, \
2191 ivlen, \
2192 flags | EVP_CIPH_##MODE##_MODE, \
2193 aes_init_key, \
2194 aes_##mode##_cipher, \
2195 NULL, \
2196 sizeof(EVP_AES_KEY), \
2197 NULL,NULL,NULL,NULL \
2198 }; \
2199 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2200 { \
2201 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2202 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
2203 }
2204
2205 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\
2206 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2207 nid##_##keylen##_##mode, \
2208 blocksize, \
2209 (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * keylen / 8, \
2210 ivlen, \
2211 flags | EVP_CIPH_##MODE##_MODE, \
2212 s390x_aes_##mode##_init_key, \
2213 s390x_aes_##mode##_cipher, \
2214 s390x_aes_##mode##_cleanup, \
2215 sizeof(S390X_AES_##MODE##_CTX), \
2216 NULL, \
2217 NULL, \
2218 s390x_aes_##mode##_ctrl, \
2219 NULL \
2220 }; \
2221 static const EVP_CIPHER aes_##keylen##_##mode = { \
2222 nid##_##keylen##_##mode,blocksize, \
2223 (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * keylen / 8, \
2224 ivlen, \
2225 flags | EVP_CIPH_##MODE##_MODE, \
2226 aes_##mode##_init_key, \
2227 aes_##mode##_cipher, \
2228 aes_##mode##_cleanup, \
2229 sizeof(EVP_AES_##MODE##_CTX), \
2230 NULL, \
2231 NULL, \
2232 aes_##mode##_ctrl, \
2233 NULL \
2234 }; \
2235 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2236 { \
2237 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2238 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
2239 }
2240
2241 #else
2242
2243 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
2244 static const EVP_CIPHER aes_##keylen##_##mode = { \
2245 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
2246 flags|EVP_CIPH_##MODE##_MODE, \
2247 aes_init_key, \
2248 aes_##mode##_cipher, \
2249 NULL, \
2250 sizeof(EVP_AES_KEY), \
2251 NULL,NULL,NULL,NULL }; \
2252 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2253 { return &aes_##keylen##_##mode; }
2254
2255 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
2256 static const EVP_CIPHER aes_##keylen##_##mode = { \
2257 nid##_##keylen##_##mode,blocksize, \
2258 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
2259 flags|EVP_CIPH_##MODE##_MODE, \
2260 aes_##mode##_init_key, \
2261 aes_##mode##_cipher, \
2262 aes_##mode##_cleanup, \
2263 sizeof(EVP_AES_##MODE##_CTX), \
2264 NULL,NULL,aes_##mode##_ctrl,NULL }; \
2265 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2266 { return &aes_##keylen##_##mode; }
2267
2268 #endif
2269
2270 #if defined(OPENSSL_CPUID_OBJ) && (defined(__arm__) || defined(__arm) || defined(__aarch64__))
2271 # include "arm_arch.h"
2272 # if __ARM_MAX_ARCH__>=7
2273 # if defined(BSAES_ASM)
2274 # define BSAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
2275 # endif
2276 # if defined(VPAES_ASM)
2277 # define VPAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
2278 # endif
2279 # define HWAES_CAPABLE (OPENSSL_armcap_P & ARMV8_AES)
2280 # define HWAES_set_encrypt_key aes_v8_set_encrypt_key
2281 # define HWAES_set_decrypt_key aes_v8_set_decrypt_key
2282 # define HWAES_encrypt aes_v8_encrypt
2283 # define HWAES_decrypt aes_v8_decrypt
2284 # define HWAES_cbc_encrypt aes_v8_cbc_encrypt
2285 # define HWAES_ctr32_encrypt_blocks aes_v8_ctr32_encrypt_blocks
2286 # endif
2287 #endif
2288
2289 #if defined(HWAES_CAPABLE)
2290 int HWAES_set_encrypt_key(const unsigned char *userKey, const int bits,
2291 AES_KEY *key);
2292 int HWAES_set_decrypt_key(const unsigned char *userKey, const int bits,
2293 AES_KEY *key);
2294 void HWAES_encrypt(const unsigned char *in, unsigned char *out,
2295 const AES_KEY *key);
2296 void HWAES_decrypt(const unsigned char *in, unsigned char *out,
2297 const AES_KEY *key);
2298 void HWAES_cbc_encrypt(const unsigned char *in, unsigned char *out,
2299 size_t length, const AES_KEY *key,
2300 unsigned char *ivec, const int enc);
2301 void HWAES_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
2302 size_t len, const AES_KEY *key,
2303 const unsigned char ivec[16]);
2304 void HWAES_xts_encrypt(const unsigned char *inp, unsigned char *out,
2305 size_t len, const AES_KEY *key1,
2306 const AES_KEY *key2, const unsigned char iv[16]);
2307 void HWAES_xts_decrypt(const unsigned char *inp, unsigned char *out,
2308 size_t len, const AES_KEY *key1,
2309 const AES_KEY *key2, const unsigned char iv[16]);
2310 #endif
2311
2312 #define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \
2313 BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2314 BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2315 BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2316 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2317 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags) \
2318 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags) \
2319 BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
2320
2321 static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2322 const unsigned char *iv, int enc)
2323 {
2324 int ret, mode;
2325 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2326
2327 mode = EVP_CIPHER_CTX_mode(ctx);
2328 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
2329 && !enc) {
2330 #ifdef HWAES_CAPABLE
2331 if (HWAES_CAPABLE) {
2332 ret = HWAES_set_decrypt_key(key,
2333 EVP_CIPHER_CTX_key_length(ctx) * 8,
2334 &dat->ks.ks);
2335 dat->block = (block128_f) HWAES_decrypt;
2336 dat->stream.cbc = NULL;
2337 # ifdef HWAES_cbc_encrypt
2338 if (mode == EVP_CIPH_CBC_MODE)
2339 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2340 # endif
2341 } else
2342 #endif
2343 #ifdef BSAES_CAPABLE
2344 if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
2345 ret = AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2346 &dat->ks.ks);
2347 dat->block = (block128_f) AES_decrypt;
2348 dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
2349 } else
2350 #endif
2351 #ifdef VPAES_CAPABLE
2352 if (VPAES_CAPABLE) {
2353 ret = vpaes_set_decrypt_key(key,
2354 EVP_CIPHER_CTX_key_length(ctx) * 8,
2355 &dat->ks.ks);
2356 dat->block = (block128_f) vpaes_decrypt;
2357 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2358 (cbc128_f) vpaes_cbc_encrypt : NULL;
2359 } else
2360 #endif
2361 {
2362 ret = AES_set_decrypt_key(key,
2363 EVP_CIPHER_CTX_key_length(ctx) * 8,
2364 &dat->ks.ks);
2365 dat->block = (block128_f) AES_decrypt;
2366 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2367 (cbc128_f) AES_cbc_encrypt : NULL;
2368 }
2369 } else
2370 #ifdef HWAES_CAPABLE
2371 if (HWAES_CAPABLE) {
2372 ret = HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2373 &dat->ks.ks);
2374 dat->block = (block128_f) HWAES_encrypt;
2375 dat->stream.cbc = NULL;
2376 # ifdef HWAES_cbc_encrypt
2377 if (mode == EVP_CIPH_CBC_MODE)
2378 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2379 else
2380 # endif
2381 # ifdef HWAES_ctr32_encrypt_blocks
2382 if (mode == EVP_CIPH_CTR_MODE)
2383 dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
2384 else
2385 # endif
2386 (void)0; /* terminate potentially open 'else' */
2387 } else
2388 #endif
2389 #ifdef BSAES_CAPABLE
2390 if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
2391 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2392 &dat->ks.ks);
2393 dat->block = (block128_f) AES_encrypt;
2394 dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
2395 } else
2396 #endif
2397 #ifdef VPAES_CAPABLE
2398 if (VPAES_CAPABLE) {
2399 ret = vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2400 &dat->ks.ks);
2401 dat->block = (block128_f) vpaes_encrypt;
2402 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2403 (cbc128_f) vpaes_cbc_encrypt : NULL;
2404 } else
2405 #endif
2406 {
2407 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2408 &dat->ks.ks);
2409 dat->block = (block128_f) AES_encrypt;
2410 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2411 (cbc128_f) AES_cbc_encrypt : NULL;
2412 #ifdef AES_CTR_ASM
2413 if (mode == EVP_CIPH_CTR_MODE)
2414 dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
2415 #endif
2416 }
2417
2418 if (ret < 0) {
2419 EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
2420 return 0;
2421 }
2422
2423 return 1;
2424 }
2425
2426 static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2427 const unsigned char *in, size_t len)
2428 {
2429 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2430
2431 if (dat->stream.cbc)
2432 (*dat->stream.cbc) (in, out, len, &dat->ks,
2433 EVP_CIPHER_CTX_iv_noconst(ctx),
2434 EVP_CIPHER_CTX_encrypting(ctx));
2435 else if (EVP_CIPHER_CTX_encrypting(ctx))
2436 CRYPTO_cbc128_encrypt(in, out, len, &dat->ks,
2437 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
2438 else
2439 CRYPTO_cbc128_decrypt(in, out, len, &dat->ks,
2440 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
2441
2442 return 1;
2443 }
2444
2445 static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2446 const unsigned char *in, size_t len)
2447 {
2448 size_t bl = EVP_CIPHER_CTX_block_size(ctx);
2449 size_t i;
2450 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2451
2452 if (len < bl)
2453 return 1;
2454
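    /*
     * Process every complete block: with len reduced by one block, the
     * loop condition i <= len visits exactly the full blocks. The EVP
     * layer is expected to pass only multiples of the block size for
     * ECB, so no partial tail should remain.
     */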
2455 for (i = 0, len -= bl; i <= len; i += bl)
2456 (*dat->block) (in + i, out + i, &dat->ks);
2457
2458 return 1;
2459 }
2460
2461 static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2462 const unsigned char *in, size_t len)
2463 {
2464 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2465
2466 int num = EVP_CIPHER_CTX_num(ctx);
2467 CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
2468 EVP_CIPHER_CTX_iv_noconst(ctx), &num, dat->block);
2469 EVP_CIPHER_CTX_set_num(ctx, num);
2470 return 1;
2471 }
2472
2473 static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2474 const unsigned char *in, size_t len)
2475 {
2476 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2477
2478 int num = EVP_CIPHER_CTX_num(ctx);
2479 CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
2480 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2481 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2482 EVP_CIPHER_CTX_set_num(ctx, num);
2483 return 1;
2484 }
2485
2486 static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2487 const unsigned char *in, size_t len)
2488 {
2489 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2490
2491 int num = EVP_CIPHER_CTX_num(ctx);
2492 CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
2493 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2494 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2495 EVP_CIPHER_CTX_set_num(ctx, num);
2496 return 1;
2497 }
2498
2499 static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2500 const unsigned char *in, size_t len)
2501 {
2502 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2503
2504 if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
2505 int num = EVP_CIPHER_CTX_num(ctx);
2506 CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
2507 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2508 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2509 EVP_CIPHER_CTX_set_num(ctx, num);
2510 return 1;
2511 }
2512
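    /*
     * In byte mode, process the input in MAXBITCHUNK-sized chunks so
     * that the bit count len * 8 passed below cannot overflow a size_t.
     */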
2513 while (len >= MAXBITCHUNK) {
2514 int num = EVP_CIPHER_CTX_num(ctx);
2515 CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
2516 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2517 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2518 EVP_CIPHER_CTX_set_num(ctx, num);
2519 len -= MAXBITCHUNK;
2520 out += MAXBITCHUNK;
2521 in += MAXBITCHUNK;
2522 }
2523 if (len) {
2524 int num = EVP_CIPHER_CTX_num(ctx);
2525 CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
2526 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2527 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2528 EVP_CIPHER_CTX_set_num(ctx, num);
2529 }
2530
2531 return 1;
2532 }
2533
2534 static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2535 const unsigned char *in, size_t len)
2536 {
2537 unsigned int num = EVP_CIPHER_CTX_num(ctx);
2538 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2539
2540 if (dat->stream.ctr)
2541 CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
2542 EVP_CIPHER_CTX_iv_noconst(ctx),
2543 EVP_CIPHER_CTX_buf_noconst(ctx),
2544 &num, dat->stream.ctr);
2545 else
2546 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
2547 EVP_CIPHER_CTX_iv_noconst(ctx),
2548 EVP_CIPHER_CTX_buf_noconst(ctx), &num,
2549 dat->block);
2550 EVP_CIPHER_CTX_set_num(ctx, num);
2551 return 1;
2552 }
2553
2554 BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
2555 BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
2556 BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
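
/*-
 * For example, BLOCK_CIPHER_generic_pack(NID_aes, 128, 0) expands (via
 * BLOCK_CIPHER_generic, non-s390x branch) into seven EVP_CIPHER tables
 * and their accessors; one expansion looks roughly like this sketch:
 *
 *     static const EVP_CIPHER aes_128_cbc = {
 *         NID_aes_128_cbc, 16, 16, 16,
 *         EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CBC_MODE,
 *         aes_init_key,
 *         aes_cbc_cipher,
 *         NULL,
 *         sizeof(EVP_AES_KEY),
 *         NULL, NULL, NULL, NULL
 *     };
 *     const EVP_CIPHER *EVP_aes_128_cbc(void) { return &aes_128_cbc; }
 */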
2557
2558 static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
2559 {
2560 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
2561 if (gctx == NULL)
2562 return 0;
2563 OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
2564 if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
2565 OPENSSL_free(gctx->iv);
2566 return 1;
2567 }
2568
2569 /* increment counter (64-bit int) by 1 */
2570 static void ctr64_inc(unsigned char *counter)
2571 {
2572 int n = 8;
2573 unsigned char c;
2574
2575 do {
2576 --n;
2577 c = counter[n];
2578 ++c;
2579 counter[n] = c;
2580 if (c)
2581 return;
2582 } while (n);
2583 }
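
/*-
 * For example (illustrative): the carry propagates from the last byte
 * upward, so
 *
 *     unsigned char c[8] = { 0, 0, 0, 0, 0, 0, 0, 0xff };
 *     ctr64_inc(c);   // c is now { 0, 0, 0, 0, 0, 0, 1, 0 }
 */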
2584
2585 static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2586 {
2587 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
2588 switch (type) {
2589 case EVP_CTRL_INIT:
2590 gctx->key_set = 0;
2591 gctx->iv_set = 0;
2592 gctx->ivlen = EVP_CIPHER_CTX_iv_length(c);
2593 gctx->iv = EVP_CIPHER_CTX_iv_noconst(c);
2594 gctx->taglen = -1;
2595 gctx->iv_gen = 0;
2596 gctx->tls_aad_len = -1;
2597 return 1;
2598
2599 case EVP_CTRL_AEAD_SET_IVLEN:
2600 if (arg <= 0)
2601 return 0;
2602 /* Allocate memory for IV if needed */
2603 if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
2604 if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
2605 OPENSSL_free(gctx->iv);
2606 gctx->iv = OPENSSL_malloc(arg);
2607 if (gctx->iv == NULL)
2608 return 0;
2609 }
2610 gctx->ivlen = arg;
2611 return 1;
2612
2613 case EVP_CTRL_AEAD_SET_TAG:
2614 if (arg <= 0 || arg > 16 || EVP_CIPHER_CTX_encrypting(c))
2615 return 0;
2616 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
2617 gctx->taglen = arg;
2618 return 1;
2619
2620 case EVP_CTRL_AEAD_GET_TAG:
2621 if (arg <= 0 || arg > 16 || !EVP_CIPHER_CTX_encrypting(c)
2622 || gctx->taglen < 0)
2623 return 0;
2624 memcpy(ptr, EVP_CIPHER_CTX_buf_noconst(c), arg);
2625 return 1;
2626
2627 case EVP_CTRL_GCM_SET_IV_FIXED:
2628 /* Special case: -1 length restores whole IV */
2629 if (arg == -1) {
2630 memcpy(gctx->iv, ptr, gctx->ivlen);
2631 gctx->iv_gen = 1;
2632 return 1;
2633 }
2634 /*
2635 * Fixed field must be at least 4 bytes and invocation field at least
2636 * 8.
2637 */
2638 if ((arg < 4) || (gctx->ivlen - arg) < 8)
2639 return 0;
2640 if (arg)
2641 memcpy(gctx->iv, ptr, arg);
2642 if (EVP_CIPHER_CTX_encrypting(c)) {
2643 if (c->drbg != NULL) {
2644 if (RAND_DRBG_bytes(c->drbg, gctx->iv + arg, gctx->ivlen - arg) == 0)
2645 return 0;
2646 } else if (RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0) {
2647 return 0;
2648 }
2649 }
2650 gctx->iv_gen = 1;
2651 return 1;
2652
2653 case EVP_CTRL_GCM_IV_GEN:
2654 if (gctx->iv_gen == 0 || gctx->key_set == 0)
2655 return 0;
2656 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2657 if (arg <= 0 || arg > gctx->ivlen)
2658 arg = gctx->ivlen;
2659 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
2660 /*
2661 * The invocation field is at least 8 bytes, so there is no need to
2662 * check for wraparound or to increment more than the last 8 bytes.
2663 */
2664 ctr64_inc(gctx->iv + gctx->ivlen - 8);
2665 gctx->iv_set = 1;
2666 return 1;
2667
2668 case EVP_CTRL_GCM_SET_IV_INV:
2669 if (gctx->iv_gen == 0 || gctx->key_set == 0
2670 || EVP_CIPHER_CTX_encrypting(c))
2671 return 0;
2672 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
2673 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2674 gctx->iv_set = 1;
2675 return 1;
2676
2677 case EVP_CTRL_AEAD_TLS1_AAD:
2678 /* Save the AAD for later use */
2679 if (arg != EVP_AEAD_TLS1_AAD_LEN)
2680 return 0;
2681 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
2682 gctx->tls_aad_len = arg;
2683 {
2684 unsigned int len =
2685 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
2686 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
2687 /* Correct length for explicit IV */
2688 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
2689 return 0;
2690 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
2691 /* If decrypting correct for tag too */
2692 if (!EVP_CIPHER_CTX_encrypting(c)) {
2693 if (len < EVP_GCM_TLS_TAG_LEN)
2694 return 0;
2695 len -= EVP_GCM_TLS_TAG_LEN;
2696 }
2697 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
2698 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
2699 }
2700 /* Extra padding: tag appended to record */
2701 return EVP_GCM_TLS_TAG_LEN;
2702
2703 case EVP_CTRL_COPY:
2704 {
2705 EVP_CIPHER_CTX *out = ptr;
2706 EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out);
2707 if (gctx->gcm.key) {
2708 if (gctx->gcm.key != &gctx->ks)
2709 return 0;
2710 gctx_out->gcm.key = &gctx_out->ks;
2711 }
2712 if (gctx->iv == EVP_CIPHER_CTX_iv_noconst(c))
2713 gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
2714 else {
2715 gctx_out->iv = OPENSSL_malloc(gctx->ivlen);
2716 if (gctx_out->iv == NULL)
2717 return 0;
2718 memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
2719 }
2720 return 1;
2721 }
2722
2723 default:
2724 return -1;
2725
2726 }
2727 }
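
/*-
 * Illustrative EVP-level use of the controls above (sketch, error
 * checking omitted; key, iv and tag are caller-supplied buffers):
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     EVP_EncryptInit_ex(c, EVP_aes_256_gcm(), NULL, NULL, NULL);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL);
 *     EVP_EncryptInit_ex(c, NULL, NULL, key, iv);
 *     ... EVP_EncryptUpdate()/EVP_EncryptFinal_ex() ...
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
 */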
2728
2729 static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2730 const unsigned char *iv, int enc)
2731 {
2732 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
2733 if (!iv && !key)
2734 return 1;
2735 if (key) {
2736 do {
2737 #ifdef HWAES_CAPABLE
2738 if (HWAES_CAPABLE) {
2739 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2740 &gctx->ks.ks);
2741 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2742 (block128_f) HWAES_encrypt);
2743 # ifdef HWAES_ctr32_encrypt_blocks
2744 gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
2745 # else
2746 gctx->ctr = NULL;
2747 # endif
2748 break;
2749 } else
2750 #endif
2751 #ifdef BSAES_CAPABLE
2752 if (BSAES_CAPABLE) {
2753 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2754 &gctx->ks.ks);
2755 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2756 (block128_f) AES_encrypt);
2757 gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
2758 break;
2759 } else
2760 #endif
2761 #ifdef VPAES_CAPABLE
2762 if (VPAES_CAPABLE) {
2763 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2764 &gctx->ks.ks);
2765 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2766 (block128_f) vpaes_encrypt);
2767 gctx->ctr = NULL;
2768 break;
2769 } else
2770 #endif
2771 (void)0; /* terminate potentially open 'else' */
2772
2773 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2774 &gctx->ks.ks);
2775 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2776 (block128_f) AES_encrypt);
2777 #ifdef AES_CTR_ASM
2778 gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
2779 #else
2780 gctx->ctr = NULL;
2781 #endif
2782 } while (0);
2783
2784 /*
2785 * If we have an iv we can set it directly, otherwise use saved IV.
2786 */
2787 if (iv == NULL && gctx->iv_set)
2788 iv = gctx->iv;
2789 if (iv) {
2790 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
2791 gctx->iv_set = 1;
2792 }
2793 gctx->key_set = 1;
2794 } else {
2795 /* If key set use IV, otherwise copy */
2796 if (gctx->key_set)
2797 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
2798 else
2799 memcpy(gctx->iv, iv, gctx->ivlen);
2800 gctx->iv_set = 1;
2801 gctx->iv_gen = 0;
2802 }
2803 return 1;
2804 }
2805
2806 /*
2807 * Handle TLS GCM packet format. This consists of the last portion of the IV
2808 * followed by the payload and finally the tag. On encrypt generate IV,
2809 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload
2810 * and verify tag.
2811 */
2812
2813 static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2814 const unsigned char *in, size_t len)
2815 {
2816 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
2817 int rv = -1;
2818 /* Encrypt/decrypt must be performed in place */
2819 if (out != in
2820 || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
2821 return -1;
2822 /*
2823 * Set IV from start of buffer or generate IV and write to start of
2824 * buffer.
2825 */
2826 if (EVP_CIPHER_CTX_ctrl(ctx, EVP_CIPHER_CTX_encrypting(ctx) ?
2827 EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
2828 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
2829 goto err;
2830 /* Use saved AAD */
2831 if (CRYPTO_gcm128_aad(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
2832 gctx->tls_aad_len))
2833 goto err;
2834 /* Fix buffer and length to point to payload */
2835 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
2836 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
2837 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
2838 if (EVP_CIPHER_CTX_encrypting(ctx)) {
2839 /* Encrypt payload */
2840 if (gctx->ctr) {
2841 size_t bulk = 0;
2842 #if defined(AES_GCM_ASM)
2843 if (len >= 32 && AES_GCM_ASM(gctx)) {
2844 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
2845 return -1;
2846
2847 bulk = AES_gcm_encrypt(in, out, len,
2848 gctx->gcm.key,
2849 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2850 gctx->gcm.len.u[1] += bulk;
2851 }
2852 #endif
2853 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
2854 in + bulk,
2855 out + bulk,
2856 len - bulk, gctx->ctr))
2857 goto err;
2858 } else {
2859 size_t bulk = 0;
2860 #if defined(AES_GCM_ASM2)
2861 if (len >= 32 && AES_GCM_ASM2(gctx)) {
2862 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
2863 return -1;
2864
2865 bulk = AES_gcm_encrypt(in, out, len,
2866 gctx->gcm.key,
2867 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2868 gctx->gcm.len.u[1] += bulk;
2869 }
2870 #endif
2871 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
2872 in + bulk, out + bulk, len - bulk))
2873 goto err;
2874 }
2875 out += len;
2876 /* Finally write tag */
2877 CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
2878 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
2879 } else {
2880 /* Decrypt */
2881 if (gctx->ctr) {
2882 size_t bulk = 0;
2883 #if defined(AES_GCM_ASM)
2884 if (len >= 16 && AES_GCM_ASM(gctx)) {
2885 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
2886 return -1;
2887
2888 bulk = AES_gcm_decrypt(in, out, len,
2889 gctx->gcm.key,
2890 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2891 gctx->gcm.len.u[1] += bulk;
2892 }
2893 #endif
2894 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
2895 in + bulk,
2896 out + bulk,
2897 len - bulk, gctx->ctr))
2898 goto err;
2899 } else {
2900 size_t bulk = 0;
2901 #if defined(AES_GCM_ASM2)
2902 if (len >= 16 && AES_GCM_ASM2(gctx)) {
2903 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
2904 return -1;
2905
2906 bulk = AES_gcm_decrypt(in, out, len,
2907 gctx->gcm.key,
2908 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2909 gctx->gcm.len.u[1] += bulk;
2910 }
2911 #endif
2912 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
2913 in + bulk, out + bulk, len - bulk))
2914 goto err;
2915 }
2916 /* Retrieve tag */
2917 CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
2918 EVP_GCM_TLS_TAG_LEN);
2919 /* If tag mismatch wipe buffer */
2920 if (CRYPTO_memcmp(EVP_CIPHER_CTX_buf_noconst(ctx), in + len,
2921 EVP_GCM_TLS_TAG_LEN)) {
2922 OPENSSL_cleanse(out, len);
2923 goto err;
2924 }
2925 rv = len;
2926 }
2927
2928 err:
2929 gctx->iv_set = 0;
2930 gctx->tls_aad_len = -1;
2931 return rv;
2932 }
2933
2934 static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2935 const unsigned char *in, size_t len)
2936 {
2937 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
2938 /* If not set up, return error */
2939 if (!gctx->key_set)
2940 return -1;
2941
2942 if (gctx->tls_aad_len >= 0)
2943 return aes_gcm_tls_cipher(ctx, out, in, len);
2944
2945 if (!gctx->iv_set)
2946 return -1;
2947 if (in) {
2948 if (out == NULL) {
2949 if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
2950 return -1;
2951 } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
2952 if (gctx->ctr) {
2953 size_t bulk = 0;
2954 #if defined(AES_GCM_ASM)
2955 if (len >= 32 && AES_GCM_ASM(gctx)) {
2956 size_t res = (16 - gctx->gcm.mres) % 16;
2957
2958 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
2959 return -1;
2960
2961 bulk = AES_gcm_encrypt(in + res,
2962 out + res, len - res,
2963 gctx->gcm.key, gctx->gcm.Yi.c,
2964 gctx->gcm.Xi.u);
2965 gctx->gcm.len.u[1] += bulk;
2966 bulk += res;
2967 }
2968 #endif
2969 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
2970 in + bulk,
2971 out + bulk,
2972 len - bulk, gctx->ctr))
2973 return -1;
2974 } else {
2975 size_t bulk = 0;
2976 #if defined(AES_GCM_ASM2)
2977 if (len >= 32 && AES_GCM_ASM2(gctx)) {
2978 size_t res = (16 - gctx->gcm.mres) % 16;
2979
2980 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
2981 return -1;
2982
2983 bulk = AES_gcm_encrypt(in + res,
2984 out + res, len - res,
2985 gctx->gcm.key, gctx->gcm.Yi.c,
2986 gctx->gcm.Xi.u);
2987 gctx->gcm.len.u[1] += bulk;
2988 bulk += res;
2989 }
2990 #endif
2991 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
2992 in + bulk, out + bulk, len - bulk))
2993 return -1;
2994 }
2995 } else {
2996 if (gctx->ctr) {
2997 size_t bulk = 0;
2998 #if defined(AES_GCM_ASM)
2999 if (len >= 16 && AES_GCM_ASM(gctx)) {
3000 size_t res = (16 - gctx->gcm.mres) % 16;
3001
3002 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3003 return -1;
3004
3005 bulk = AES_gcm_decrypt(in + res,
3006 out + res, len - res,
3007 gctx->gcm.key,
3008 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3009 gctx->gcm.len.u[1] += bulk;
3010 bulk += res;
3011 }
3012 #endif
3013 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
3014 in + bulk,
3015 out + bulk,
3016 len - bulk, gctx->ctr))
3017 return -1;
3018 } else {
3019 size_t bulk = 0;
3020 #if defined(AES_GCM_ASM2)
3021 if (len >= 16 && AES_GCM_ASM2(gctx)) {
3022 size_t res = (16 - gctx->gcm.mres) % 16;
3023
3024 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3025 return -1;
3026
3027 bulk = AES_gcm_decrypt(in + res,
3028 out + res, len - res,
3029 gctx->gcm.key,
3030 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3031 gctx->gcm.len.u[1] += bulk;
3032 bulk += res;
3033 }
3034 #endif
3035 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
3036 in + bulk, out + bulk, len - bulk))
3037 return -1;
3038 }
3039 }
3040 return len;
3041 } else {
3042 if (!EVP_CIPHER_CTX_encrypting(ctx)) {
3043 if (gctx->taglen < 0)
3044 return -1;
3045 if (CRYPTO_gcm128_finish(&gctx->gcm,
3046 EVP_CIPHER_CTX_buf_noconst(ctx),
3047 gctx->taglen) != 0)
3048 return -1;
3049 gctx->iv_set = 0;
3050 return 0;
3051 }
3052 CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx), 16);
3053 gctx->taglen = 16;
3054 /* Don't reuse the IV */
3055 gctx->iv_set = 0;
3056 return 0;
3057 }
3058
3059 }
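
/*-
 * Note the convention used above: a call with out == NULL marks the
 * input as AAD. At the EVP level that looks like (sketch):
 *
 *     int outl;
 *     EVP_EncryptUpdate(ctx, NULL, &outl, aad, aadlen);   // AAD only
 *     EVP_EncryptUpdate(ctx, ct, &outl, pt, ptlen);       // payload
 */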
3060
3061 #define CUSTOM_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
3062 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3063 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3064 | EVP_CIPH_CUSTOM_COPY)
3065
3066 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
3067 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3068 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
3069 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3070 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
3071 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3072
3073 static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3074 {
3075 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,c);
3076 if (type == EVP_CTRL_COPY) {
3077 EVP_CIPHER_CTX *out = ptr;
3078 EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out);
3079 if (xctx->xts.key1) {
3080 if (xctx->xts.key1 != &xctx->ks1)
3081 return 0;
3082 xctx_out->xts.key1 = &xctx_out->ks1;
3083 }
3084 if (xctx->xts.key2) {
3085 if (xctx->xts.key2 != &xctx->ks2)
3086 return 0;
3087 xctx_out->xts.key2 = &xctx_out->ks2;
3088 }
3089 return 1;
3090 } else if (type != EVP_CTRL_INIT)
3091 return -1;
3092 /* key1 and key2 are used as an indicator that both key and IV are set */
3093 xctx->xts.key1 = NULL;
3094 xctx->xts.key2 = NULL;
3095 return 1;
3096 }
3097
3098 static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3099 const unsigned char *iv, int enc)
3100 {
3101 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
3102 if (!iv && !key)
3103 return 1;
3104
3105 if (key)
3106 do {
3107 #ifdef AES_XTS_ASM
3108 xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
3109 #else
3110 xctx->stream = NULL;
3111 #endif
3112 /* key_len covers two AES keys */
3113 #ifdef HWAES_CAPABLE
3114 if (HWAES_CAPABLE) {
3115 if (enc) {
3116 HWAES_set_encrypt_key(key,
3117 EVP_CIPHER_CTX_key_length(ctx) * 4,
3118 &xctx->ks1.ks);
3119 xctx->xts.block1 = (block128_f) HWAES_encrypt;
3120 # ifdef HWAES_xts_encrypt
3121 xctx->stream = HWAES_xts_encrypt;
3122 # endif
3123 } else {
3124 HWAES_set_decrypt_key(key,
3125 EVP_CIPHER_CTX_key_length(ctx) * 4,
3126 &xctx->ks1.ks);
3127 xctx->xts.block1 = (block128_f) HWAES_decrypt;
3128 # ifdef HWAES_xts_decrypt
3129 xctx->stream = HWAES_xts_decrypt;
3130 # endif
3131 }
3132
3133 HWAES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
3134 EVP_CIPHER_CTX_key_length(ctx) * 4,
3135 &xctx->ks2.ks);
3136 xctx->xts.block2 = (block128_f) HWAES_encrypt;
3137
3138 xctx->xts.key1 = &xctx->ks1;
3139 break;
3140 } else
3141 #endif
3142 #ifdef BSAES_CAPABLE
3143 if (BSAES_CAPABLE)
3144 xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
3145 else
3146 #endif
3147 #ifdef VPAES_CAPABLE
3148 if (VPAES_CAPABLE) {
3149 if (enc) {
3150 vpaes_set_encrypt_key(key,
3151 EVP_CIPHER_CTX_key_length(ctx) * 4,
3152 &xctx->ks1.ks);
3153 xctx->xts.block1 = (block128_f) vpaes_encrypt;
3154 } else {
3155 vpaes_set_decrypt_key(key,
3156 EVP_CIPHER_CTX_key_length(ctx) * 4,
3157 &xctx->ks1.ks);
3158 xctx->xts.block1 = (block128_f) vpaes_decrypt;
3159 }
3160
3161 vpaes_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
3162 EVP_CIPHER_CTX_key_length(ctx) * 4,
3163 &xctx->ks2.ks);
3164 xctx->xts.block2 = (block128_f) vpaes_encrypt;
3165
3166 xctx->xts.key1 = &xctx->ks1;
3167 break;
3168 } else
3169 #endif
3170 (void)0; /* terminate potentially open 'else' */
3171
3172 if (enc) {
3173 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
3174 &xctx->ks1.ks);
3175 xctx->xts.block1 = (block128_f) AES_encrypt;
3176 } else {
3177 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
3178 &xctx->ks1.ks);
3179 xctx->xts.block1 = (block128_f) AES_decrypt;
3180 }
3181
3182 AES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
3183 EVP_CIPHER_CTX_key_length(ctx) * 4,
3184 &xctx->ks2.ks);
3185 xctx->xts.block2 = (block128_f) AES_encrypt;
3186
3187 xctx->xts.key1 = &xctx->ks1;
3188 } while (0);
3189
3190 if (iv) {
3191 xctx->xts.key2 = &xctx->ks2;
3192 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
3193 }
3194
3195 return 1;
3196 }
3197
3198 static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3199 const unsigned char *in, size_t len)
3200 {
3201 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
3202 if (!xctx->xts.key1 || !xctx->xts.key2)
3203 return 0;
3204 if (!out || !in || len < AES_BLOCK_SIZE)
3205 return 0;
3206 if (xctx->stream)
3207 (*xctx->stream) (in, out, len,
3208 xctx->xts.key1, xctx->xts.key2,
3209 EVP_CIPHER_CTX_iv_noconst(ctx));
3210 else if (CRYPTO_xts128_encrypt(&xctx->xts, EVP_CIPHER_CTX_iv_noconst(ctx),
3211 in, out, len,
3212 EVP_CIPHER_CTX_encrypting(ctx)))
3213 return 0;
3214 return 1;
3215 }
3216
3217 #define aes_xts_cleanup NULL
3218
3219 #define XTS_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
3220 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3221 | EVP_CIPH_CUSTOM_COPY)
3222
3223 BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
3224 BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
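
/*-
 * Note: the XTS key length above covers both AES keys, so
 * EVP_aes_256_xts() expects a 64-byte key (key1 || key2) and a 16-byte
 * tweak passed as the IV. Illustrative usage (sketch):
 *
 *     unsigned char key[64], iv[16];
 *     EVP_EncryptInit_ex(ctx, EVP_aes_256_xts(), NULL, key, iv);
 */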
3225
3226 static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3227 {
3228 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c);
3229 switch (type) {
3230 case EVP_CTRL_INIT:
3231 cctx->key_set = 0;
3232 cctx->iv_set = 0;
3233 cctx->L = 8;
3234 cctx->M = 12;
3235 cctx->tag_set = 0;
3236 cctx->len_set = 0;
3237 cctx->tls_aad_len = -1;
3238 return 1;
3239
3240 case EVP_CTRL_AEAD_TLS1_AAD:
3241 /* Save the AAD for later use */
3242 if (arg != EVP_AEAD_TLS1_AAD_LEN)
3243 return 0;
3244 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3245 cctx->tls_aad_len = arg;
3246 {
3247 uint16_t len =
3248 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
3249 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
3250 /* Correct length for explicit IV */
3251 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
3252 return 0;
3253 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
3254 /* If decrypting correct for tag too */
3255 if (!EVP_CIPHER_CTX_encrypting(c)) {
3256 if (len < cctx->M)
3257 return 0;
3258 len -= cctx->M;
3259 }
3260 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
3261 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
3262 }
3263 /* Extra padding: tag appended to record */
3264 return cctx->M;
3265
3266 case EVP_CTRL_CCM_SET_IV_FIXED:
3267 /* Sanity check length */
3268 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
3269 return 0;
3270 /* Just copy to first part of IV */
3271 memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg);
3272 return 1;
3273
3274 case EVP_CTRL_AEAD_SET_IVLEN:
3275 arg = 15 - arg;
3276 /* fall through */
3277 case EVP_CTRL_CCM_SET_L:
3278 if (arg < 2 || arg > 8)
3279 return 0;
3280 cctx->L = arg;
3281 return 1;
3282
3283 case EVP_CTRL_AEAD_SET_TAG:
3284 if ((arg & 1) || arg < 4 || arg > 16)
3285 return 0;
3286 if (EVP_CIPHER_CTX_encrypting(c) && ptr)
3287 return 0;
3288 if (ptr) {
3289 cctx->tag_set = 1;
3290 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3291 }
3292 cctx->M = arg;
3293 return 1;
3294
3295 case EVP_CTRL_AEAD_GET_TAG:
3296 if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
3297 return 0;
3298 if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
3299 return 0;
3300 cctx->tag_set = 0;
3301 cctx->iv_set = 0;
3302 cctx->len_set = 0;
3303 return 1;
3304
3305 case EVP_CTRL_COPY:
3306 {
3307 EVP_CIPHER_CTX *out = ptr;
3308 EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out);
3309 if (cctx->ccm.key) {
3310 if (cctx->ccm.key != &cctx->ks)
3311 return 0;
3312 cctx_out->ccm.key = &cctx_out->ks;
3313 }
3314 return 1;
3315 }
3316
3317 default:
3318 return -1;
3319
3320 }
3321 }
3322
3323 static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3324 const unsigned char *iv, int enc)
3325 {
3326 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3327 if (!iv && !key)
3328 return 1;
3329 if (key)
3330 do {
3331 #ifdef HWAES_CAPABLE
3332 if (HWAES_CAPABLE) {
3333 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3334 &cctx->ks.ks);
3335
3336 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3337 &cctx->ks, (block128_f) HWAES_encrypt);
3338 cctx->str = NULL;
3339 cctx->key_set = 1;
3340 break;
3341 } else
3342 #endif
3343 #ifdef VPAES_CAPABLE
3344 if (VPAES_CAPABLE) {
3345 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3346 &cctx->ks.ks);
3347 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3348 &cctx->ks, (block128_f) vpaes_encrypt);
3349 cctx->str = NULL;
3350 cctx->key_set = 1;
3351 break;
3352 }
3353 #endif
3354 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3355 &cctx->ks.ks);
3356 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3357 &cctx->ks, (block128_f) AES_encrypt);
3358 cctx->str = NULL;
3359 cctx->key_set = 1;
3360 } while (0);
3361 if (iv) {
3362 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
3363 cctx->iv_set = 1;
3364 }
3365 return 1;
3366 }
3367
3368 static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3369 const unsigned char *in, size_t len)
3370 {
3371 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3372 CCM128_CONTEXT *ccm = &cctx->ccm;
3373 /* Encrypt/decrypt must be performed in place */
3374 if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
3375 return -1;
3376 /* If encrypting set explicit IV from sequence number (start of AAD) */
3377 if (EVP_CIPHER_CTX_encrypting(ctx))
3378 memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
3379 EVP_CCM_TLS_EXPLICIT_IV_LEN);
3380 /* Get rest of IV from explicit IV */
3381 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in,
3382 EVP_CCM_TLS_EXPLICIT_IV_LEN);
3383 /* Correct length value */
3384 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3385 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L,
3386 len))
3387 return -1;
3388 /* Use saved AAD */
3389 CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
3390 /* Fix buffer to point to payload */
3391 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3392 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3393 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3394 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3395 cctx->str) :
3396 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3397 return -1;
3398 if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
3399 return -1;
3400 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3401 } else {
3402 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3403 cctx->str) :
3404 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3405 unsigned char tag[16];
3406 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3407 if (!CRYPTO_memcmp(tag, in + len, cctx->M))
3408 return len;
3409 }
3410 }
3411 OPENSSL_cleanse(out, len);
3412 return -1;
3413 }
3414 }
3415
3416 static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3417 const unsigned char *in, size_t len)
3418 {
3419 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3420 CCM128_CONTEXT *ccm = &cctx->ccm;
3421 /* If not set up, return error */
3422 if (!cctx->key_set)
3423 return -1;
3424
3425 if (cctx->tls_aad_len >= 0)
3426 return aes_ccm_tls_cipher(ctx, out, in, len);
3427
3428 /* EVP_*Final() doesn't return any data */
3429 if (in == NULL && out != NULL)
3430 return 0;
3431
3432 if (!cctx->iv_set)
3433 return -1;
3434
3435 if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
3436 return -1;
3437 if (!out) {
3438 if (!in) {
3439 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
3440 15 - cctx->L, len))
3441 return -1;
3442 cctx->len_set = 1;
3443 return len;
3444 }
3445 /* If we have AAD we need the message length */
3446 if (!cctx->len_set && len)
3447 return -1;
3448 CRYPTO_ccm128_aad(ccm, in, len);
3449 return len;
3450 }
3451 /* If the length has not been set yet, set it now */
3452 if (!cctx->len_set) {
3453 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
3454 15 - cctx->L, len))
3455 return -1;
3456 cctx->len_set = 1;
3457 }
3458 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3459 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3460 cctx->str) :
3461 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3462 return -1;
3463 cctx->tag_set = 1;
3464 return len;
3465 } else {
3466 int rv = -1;
3467 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3468 cctx->str) :
3469 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3470 unsigned char tag[16];
3471 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3472 if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
3473 cctx->M))
3474 rv = len;
3475 }
3476 }
3477 if (rv == -1)
3478 OPENSSL_cleanse(out, len);
3479 cctx->iv_set = 0;
3480 cctx->tag_set = 0;
3481 cctx->len_set = 0;
3482 return rv;
3483 }
3484 }
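
/*-
 * Illustrative CCM usage at the EVP level (sketch, error checking
 * omitted; key, nonce, aad, pt, ct and tag are caller-supplied). Unlike
 * GCM, the total plaintext length must be passed (in == NULL,
 * out == NULL) before any AAD:
 *
 *     int outl;
 *     EVP_EncryptInit_ex(ctx, EVP_aes_128_ccm(), NULL, NULL, NULL);
 *     EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_IVLEN, 13, NULL);
 *     EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, 16, NULL);
 *     EVP_EncryptInit_ex(ctx, NULL, NULL, key, nonce);
 *     EVP_EncryptUpdate(ctx, NULL, &outl, NULL, ptlen); // total length
 *     EVP_EncryptUpdate(ctx, NULL, &outl, aad, aadlen); // AAD
 *     EVP_EncryptUpdate(ctx, ct, &outl, pt, ptlen);     // payload
 *     EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG, 16, tag);
 */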
3485
3486 #define aes_ccm_cleanup NULL
3487
3488 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
3489 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3490 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
3491 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3492 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
3493 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3494
3495 typedef struct {
3496 union {
3497 double align;
3498 AES_KEY ks;
3499 } ks;
3500 /* Non-NULL iv indicates that the IV has been set */
3501 unsigned char *iv;
3502 } EVP_AES_WRAP_CTX;
3503
3504 static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3505 const unsigned char *iv, int enc)
3506 {
3507 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
3508 if (!iv && !key)
3509 return 1;
3510 if (key) {
3511 if (EVP_CIPHER_CTX_encrypting(ctx))
3512 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3513 &wctx->ks.ks);
3514 else
3515 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3516 &wctx->ks.ks);
3517 if (!iv)
3518 wctx->iv = NULL;
3519 }
3520 if (iv) {
3521 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, EVP_CIPHER_CTX_iv_length(ctx));
3522 wctx->iv = EVP_CIPHER_CTX_iv_noconst(ctx);
3523 }
3524 return 1;
3525 }
3526
3527 static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3528 const unsigned char *in, size_t inlen)
3529 {
3530 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
3531 size_t rv;
3532 /* AES wrap with padding has IV length of 4, without padding 8 */
3533 int pad = EVP_CIPHER_CTX_iv_length(ctx) == 4;
3534 /* No final operation so always return zero length */
3535 if (!in)
3536 return 0;
3537 /* Input length must always be non-zero */
3538 if (!inlen)
3539 return -1;
3540 /* If decrypting, the input must be at least 16 bytes and a multiple of 8 */
3541 if (!EVP_CIPHER_CTX_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
3542 return -1;
3543 /* If not padding, the input must be a multiple of 8 */
3544 if (!pad && inlen & 0x7)
3545 return -1;
3546 if (is_partially_overlapping(out, in, inlen)) {
3547 EVPerr(EVP_F_AES_WRAP_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
3548 return 0;
3549 }
3550 if (!out) {
3551 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3552 /* If padding, round up to a multiple of 8 */
3553 if (pad)
3554 inlen = (inlen + 7) / 8 * 8;
3555 /* 8 byte prefix */
3556 return inlen + 8;
3557 } else {
3558 /*
3559 * If not padding, the output will be exactly 8 bytes smaller than
3560 * the input. If padding, it will be at least 8 bytes smaller, but
3561 * we don't know by how much.
3562 */
3563 return inlen - 8;
3564 }
3565 }
3566 if (pad) {
3567 if (EVP_CIPHER_CTX_encrypting(ctx))
3568 rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
3569 out, in, inlen,
3570 (block128_f) AES_encrypt);
3571 else
3572 rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
3573 out, in, inlen,
3574 (block128_f) AES_decrypt);
3575 } else {
3576 if (EVP_CIPHER_CTX_encrypting(ctx))
3577 rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
3578 out, in, inlen, (block128_f) AES_encrypt);
3579 else
3580 rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
3581 out, in, inlen, (block128_f) AES_decrypt);
3582 }
3583 return rv ? (int)rv : -1;
3584 }
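
/*-
 * Output sizing for the wrap ciphers (illustrative): wrapping adds an
 * 8-byte integrity prefix, so wrapping a 16-byte key with
 * EVP_aes_128_wrap() produces 24 bytes; with padding
 * (EVP_aes_128_wrap_pad()) a 20-byte input is first padded to 24 bytes
 * and produces 32. Sketch (kek, in and out are caller-supplied):
 *
 *     EVP_CIPHER_CTX_set_flags(ctx, EVP_CIPHER_CTX_FLAG_WRAP_ALLOW);
 *     EVP_EncryptInit_ex(ctx, EVP_aes_128_wrap(), NULL, kek, NULL);
 *     EVP_EncryptUpdate(ctx, out, &outl, in, inlen); // outl == inlen + 8
 */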
3585
3586 #define WRAP_FLAGS (EVP_CIPH_WRAP_MODE \
3587 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3588 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)
3589
3590 static const EVP_CIPHER aes_128_wrap = {
3591 NID_id_aes128_wrap,
3592 8, 16, 8, WRAP_FLAGS,
3593 aes_wrap_init_key, aes_wrap_cipher,
3594 NULL,
3595 sizeof(EVP_AES_WRAP_CTX),
3596 NULL, NULL, NULL, NULL
3597 };
3598
3599 const EVP_CIPHER *EVP_aes_128_wrap(void)
3600 {
3601 return &aes_128_wrap;
3602 }
3603
3604 static const EVP_CIPHER aes_192_wrap = {
3605 NID_id_aes192_wrap,
3606 8, 24, 8, WRAP_FLAGS,
3607 aes_wrap_init_key, aes_wrap_cipher,
3608 NULL,
3609 sizeof(EVP_AES_WRAP_CTX),
3610 NULL, NULL, NULL, NULL
3611 };
3612
3613 const EVP_CIPHER *EVP_aes_192_wrap(void)
3614 {
3615 return &aes_192_wrap;
3616 }
3617
3618 static const EVP_CIPHER aes_256_wrap = {
3619 NID_id_aes256_wrap,
3620 8, 32, 8, WRAP_FLAGS,
3621 aes_wrap_init_key, aes_wrap_cipher,
3622 NULL,
3623 sizeof(EVP_AES_WRAP_CTX),
3624 NULL, NULL, NULL, NULL
3625 };
3626
3627 const EVP_CIPHER *EVP_aes_256_wrap(void)
3628 {
3629 return &aes_256_wrap;
3630 }
3631
3632 static const EVP_CIPHER aes_128_wrap_pad = {
3633 NID_id_aes128_wrap_pad,
3634 8, 16, 4, WRAP_FLAGS,
3635 aes_wrap_init_key, aes_wrap_cipher,
3636 NULL,
3637 sizeof(EVP_AES_WRAP_CTX),
3638 NULL, NULL, NULL, NULL
3639 };
3640
3641 const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
3642 {
3643 return &aes_128_wrap_pad;
3644 }
3645
3646 static const EVP_CIPHER aes_192_wrap_pad = {
3647 NID_id_aes192_wrap_pad,
3648 8, 24, 4, WRAP_FLAGS,
3649 aes_wrap_init_key, aes_wrap_cipher,
3650 NULL,
3651 sizeof(EVP_AES_WRAP_CTX),
3652 NULL, NULL, NULL, NULL
3653 };
3654
3655 const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
3656 {
3657 return &aes_192_wrap_pad;
3658 }
3659
3660 static const EVP_CIPHER aes_256_wrap_pad = {
3661 NID_id_aes256_wrap_pad,
3662 8, 32, 4, WRAP_FLAGS,
3663 aes_wrap_init_key, aes_wrap_cipher,
3664 NULL,
3665 sizeof(EVP_AES_WRAP_CTX),
3666 NULL, NULL, NULL, NULL
3667 };
3668
3669 const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
3670 {
3671 return &aes_256_wrap_pad;
3672 }
3673
3674 #ifndef OPENSSL_NO_OCB
3675 static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3676 {
3677 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
3678 EVP_CIPHER_CTX *newc;
3679 EVP_AES_OCB_CTX *new_octx;
3680
3681 switch (type) {
3682 case EVP_CTRL_INIT:
3683 octx->key_set = 0;
3684 octx->iv_set = 0;
3685 octx->ivlen = EVP_CIPHER_CTX_iv_length(c);
3686 octx->iv = EVP_CIPHER_CTX_iv_noconst(c);
3687 octx->taglen = 16;
3688 octx->data_buf_len = 0;
3689 octx->aad_buf_len = 0;
3690 return 1;
3691
3692 case EVP_CTRL_AEAD_SET_IVLEN:
3693 /* IV len must be 1 to 15 */
3694 if (arg <= 0 || arg > 15)
3695 return 0;
3696
3697 octx->ivlen = arg;
3698 return 1;
3699
3700 case EVP_CTRL_AEAD_SET_TAG:
3701 if (!ptr) {
3702 /* Tag len must be 0 to 16 */
3703 if (arg < 0 || arg > 16)
3704 return 0;
3705
3706 octx->taglen = arg;
3707 return 1;
3708 }
3709 if (arg != octx->taglen || EVP_CIPHER_CTX_encrypting(c))
3710 return 0;
3711 memcpy(octx->tag, ptr, arg);
3712 return 1;
3713
3714 case EVP_CTRL_AEAD_GET_TAG:
3715 if (arg != octx->taglen || !EVP_CIPHER_CTX_encrypting(c))
3716 return 0;
3717
3718 memcpy(ptr, octx->tag, arg);
3719 return 1;
3720
3721 case EVP_CTRL_COPY:
3722 newc = (EVP_CIPHER_CTX *)ptr;
3723 new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc);
3724 return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
3725 &new_octx->ksenc.ks,
3726 &new_octx->ksdec.ks);
3727
3728 default:
3729 return -1;
3730
3731 }
3732 }
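
/*-
 * Illustrative OCB tag handling (sketch): the tag length defaults to 16
 * and should be set before the IV, since CRYPTO_ocb128_setiv() takes it
 * as a parameter:
 *
 *     EVP_EncryptInit_ex(ctx, EVP_aes_128_ocb(), NULL, NULL, NULL);
 *     EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, 12, NULL);
 *     EVP_EncryptInit_ex(ctx, NULL, NULL, key, iv);
 */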
3733
3734 # ifdef HWAES_CAPABLE
3735 # ifdef HWAES_ocb_encrypt
3736 void HWAES_ocb_encrypt(const unsigned char *in, unsigned char *out,
3737 size_t blocks, const void *key,
3738 size_t start_block_num,
3739 unsigned char offset_i[16],
3740 const unsigned char L_[][16],
3741 unsigned char checksum[16]);
3742 # else
3743 # define HWAES_ocb_encrypt ((ocb128_f)NULL)
3744 # endif
3745 # ifdef HWAES_ocb_decrypt
3746 void HWAES_ocb_decrypt(const unsigned char *in, unsigned char *out,
3747 size_t blocks, const void *key,
3748 size_t start_block_num,
3749 unsigned char offset_i[16],
3750 const unsigned char L_[][16],
3751 unsigned char checksum[16]);
3752 # else
3753 # define HWAES_ocb_decrypt ((ocb128_f)NULL)
3754 # endif
3755 # endif
3756
3757 static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3758 const unsigned char *iv, int enc)
3759 {
3760 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
3761 if (!iv && !key)
3762 return 1;
3763 if (key) {
3764 do {
3765 /*
3766 * We set both the encrypt and decrypt key here because decrypt
3767 * needs both. We could possibly optimise by not setting the
3768 * decrypt key for an encryption-only operation.
3769 */
3770 # ifdef HWAES_CAPABLE
3771 if (HWAES_CAPABLE) {
3772 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3773 &octx->ksenc.ks);
3774 HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3775 &octx->ksdec.ks);
3776 if (!CRYPTO_ocb128_init(&octx->ocb,
3777 &octx->ksenc.ks, &octx->ksdec.ks,
3778 (block128_f) HWAES_encrypt,
3779 (block128_f) HWAES_decrypt,
3780 enc ? HWAES_ocb_encrypt
3781 : HWAES_ocb_decrypt))
3782 return 0;
3783 break;
3784 }
3785 # endif
3786 # ifdef VPAES_CAPABLE
3787 if (VPAES_CAPABLE) {
3788 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3789 &octx->ksenc.ks);
3790 vpaes_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3791 &octx->ksdec.ks);
3792 if (!CRYPTO_ocb128_init(&octx->ocb,
3793 &octx->ksenc.ks, &octx->ksdec.ks,
3794 (block128_f) vpaes_encrypt,
3795 (block128_f) vpaes_decrypt,
3796 NULL))
3797 return 0;
3798 break;
3799 }
3800 # endif
3801 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3802 &octx->ksenc.ks);
3803 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3804 &octx->ksdec.ks);
3805 if (!CRYPTO_ocb128_init(&octx->ocb,
3806 &octx->ksenc.ks, &octx->ksdec.ks,
3807 (block128_f) AES_encrypt,
3808 (block128_f) AES_decrypt,
3809 NULL))
3810 return 0;
3811 }
3812 while (0);
3813
3814 /*
3815 * If we have an iv we can set it directly, otherwise use saved IV.
3816 */
3817 if (iv == NULL && octx->iv_set)
3818 iv = octx->iv;
3819 if (iv) {
3820 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
3821 != 1)
3822 return 0;
3823 octx->iv_set = 1;
3824 }
3825 octx->key_set = 1;
3826 } else {
3827 /* If key set use IV, otherwise copy */
3828 if (octx->key_set)
3829 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
3830 else
3831 memcpy(octx->iv, iv, octx->ivlen);
3832 octx->iv_set = 1;
3833 }
3834 return 1;
3835 }
3836
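/*
 * Process data, or AAD when out is NULL, through OCB.  Returns the number
 * of bytes written, which may be 0 if input is being buffered, or -1 on
 * error.  A final call with in == NULL flushes any buffered input and
 * computes (encrypt) or verifies (decrypt) the tag.
 */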
static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    unsigned char *buf;
    int *buf_len;
    int written_len = 0;
    size_t trailing_len;
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

    /* If IV or Key not set then return error */
    if (!octx->iv_set)
        return -1;

    if (!octx->key_set)
        return -1;

    if (in != NULL) {
        /*
         * Need to ensure we are only passing full blocks to low level OCB
         * routines. We do it here rather than in EVP_EncryptUpdate/
         * EVP_DecryptUpdate because we need to pass full blocks of AAD too
         * and those routines don't support that
         */

        /* Are we dealing with AAD or normal data here? */
        if (out == NULL) {
            buf = octx->aad_buf;
            buf_len = &(octx->aad_buf_len);
        } else {
            buf = octx->data_buf;
            buf_len = &(octx->data_buf_len);

            if (is_partially_overlapping(out + *buf_len, in, len)) {
                EVPerr(EVP_F_AES_OCB_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
                return 0;
            }
        }

        /*
         * If we've got a partially filled buffer from a previous call then
         * use that data first
         */
        if (*buf_len > 0) {
            unsigned int remaining;

            remaining = AES_BLOCK_SIZE - (*buf_len);
            if (remaining > len) {
                memcpy(buf + (*buf_len), in, len);
                *(buf_len) += len;
                return 0;
            }
            memcpy(buf + (*buf_len), in, remaining);

            /*
             * If we get here we've filled the buffer, so process it
             */
            len -= remaining;
            in += remaining;
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
                    return -1;
            } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            }
            written_len = AES_BLOCK_SIZE;
            *buf_len = 0;
            if (out != NULL)
                out += AES_BLOCK_SIZE;
        }

        /* Do we have a partial block to handle at the end? */
        trailing_len = len % AES_BLOCK_SIZE;

        /*
         * If we've got some full blocks to handle, then process these first
         */
        if (len != trailing_len) {
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
                    return -1;
            } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            }
            written_len += len - trailing_len;
            in += len - trailing_len;
        }

        /* Handle any trailing partial block */
        if (trailing_len > 0) {
            memcpy(buf, in, trailing_len);
            *buf_len = trailing_len;
        }

        return written_len;
    } else {
        /*
         * First of all empty the buffer of any partial block that we might
         * have been provided - both for data and AAD
         */
        if (octx->data_buf_len > 0) {
            if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            }
            written_len = octx->data_buf_len;
            octx->data_buf_len = 0;
        }
        if (octx->aad_buf_len > 0) {
            if (!CRYPTO_ocb128_aad
                (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
                return -1;
            octx->aad_buf_len = 0;
        }
        /* If decrypting then verify */
        if (!EVP_CIPHER_CTX_encrypting(ctx)) {
            if (octx->taglen < 0)
                return -1;
            if (CRYPTO_ocb128_finish(&octx->ocb,
                                     octx->tag, octx->taglen) != 0)
                return -1;
            octx->iv_set = 0;
            return written_len;
        }
        /* If encrypting then just get the tag */
        if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
            return -1;
        /* Don't reuse the IV */
        octx->iv_set = 0;
        return written_len;
    }
}

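/* Release any state held by the low-level OCB context */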
static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
    CRYPTO_ocb128_cleanup(&octx->ocb);
    return 1;
}

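/*
 * BLOCK_CIPHER_custom generates the EVP_CIPHER definitions and the
 * EVP_aes_128_ocb(), EVP_aes_192_ocb() and EVP_aes_256_ocb() accessors,
 * each with a 16-byte block and a 12-byte default IV.
 */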
BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
#endif                          /* OPENSSL_NO_OCB */
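
/*
 * Illustrative sketch (not part of the original source): a complete OCB
 * encryption pass over one AAD buffer and one plaintext buffer through the
 * EVP interface.  Names and sizes are hypothetical; error reporting is
 * simplified for brevity.
 */
#if 0
static int ocb_encrypt_example(const unsigned char key[16],
                               const unsigned char iv[12],
                               const unsigned char *aad, int aad_len,
                               const unsigned char *pt, int pt_len,
                               unsigned char *ct, unsigned char tag[16])
{
    EVP_CIPHER_CTX *ectx = EVP_CIPHER_CTX_new();
    int outl = 0, tmpl = 0, ok = 0;

    if (ectx == NULL)
        return 0;
    if (EVP_EncryptInit_ex(ectx, EVP_aes_128_ocb(), NULL, key, iv)
            /* AAD is fed with a NULL output pointer */
            && EVP_EncryptUpdate(ectx, NULL, &outl, aad, aad_len)
            && EVP_EncryptUpdate(ectx, ct, &outl, pt, pt_len)
            /* The final call flushes buffered input and computes the tag */
            && EVP_EncryptFinal_ex(ectx, ct + outl, &tmpl)
            && EVP_CIPHER_CTX_ctrl(ectx, EVP_CTRL_AEAD_GET_TAG, 16, tag))
        ok = 1;
    EVP_CIPHER_CTX_free(ectx);
    return ok;
}
#endif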