/* ====================================================================
 * Copyright (c) 2001-2014 The OpenSSL Project.  All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. All advertising materials mentioning features or use of this
 *    software must display the following acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
 *
 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
 *    endorse or promote products derived from this software without
 *    prior written permission. For written permission, please contact
 *    openssl-core@openssl.org.
 *
 * 5. Products derived from this software may not be called "OpenSSL"
 *    nor may "OpenSSL" appear in their names without prior written
 *    permission of the OpenSSL Project.
 *
 * 6. Redistributions of any form whatsoever must retain the following
 *    acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
 *
 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 * ====================================================================
 *
 */

#include <openssl/opensslconf.h>
#ifndef OPENSSL_NO_AES
# include <openssl/crypto.h>
# include <openssl/evp.h>
# include <openssl/err.h>
# include <string.h>
# include <assert.h>
# include <openssl/aes.h>
# include "internal/evp_int.h"
# include "modes_lcl.h"
# include <openssl/rand.h>

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks;
    block128_f block;
    union {
        cbc128_f cbc;
        ctr128_f ctr;
    } stream;
} EVP_AES_KEY;
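
/*
 * In this and the following context structures the 'double align' union
 * member serves only to force natural 64-bit alignment of the embedded
 * AES_KEY key schedule, which the assembler implementations rely on.
 */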

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    GCM128_CONTEXT gcm;
    unsigned char *iv;          /* Temporary IV store */
    int ivlen;                  /* IV length */
    int taglen;
    int iv_gen;                 /* It is OK to generate IVs */
    int tls_aad_len;            /* TLS AAD length */
    ctr128_f ctr;
} EVP_AES_GCM_CTX;

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks1, ks2;                 /* AES key schedules to use */
    XTS128_CONTEXT xts;
    void (*stream) (const unsigned char *in,
                    unsigned char *out, size_t length,
                    const AES_KEY *key1, const AES_KEY *key2,
                    const unsigned char iv[16]);
} EVP_AES_XTS_CTX;

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    int tag_set;                /* Set if tag is valid */
    int len_set;                /* Set if message length set */
    int L, M;                   /* L and M parameters from RFC3610 */
    int tls_aad_len;            /* TLS AAD length */
    CCM128_CONTEXT ccm;
    ccm128_f str;
} EVP_AES_CCM_CTX;

# ifndef OPENSSL_NO_OCB
typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ksenc;                    /* AES key schedule to use for encryption */
    union {
        double align;
        AES_KEY ks;
    } ksdec;                    /* AES key schedule to use for decryption */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    OCB128_CONTEXT ocb;
    unsigned char *iv;          /* Temporary IV store */
    unsigned char tag[16];
    unsigned char data_buf[16]; /* Store partial data blocks */
    unsigned char aad_buf[16];  /* Store partial AAD blocks */
    int data_buf_len;
    int aad_buf_len;
    int ivlen;                  /* IV length */
    int taglen;
} EVP_AES_OCB_CTX;
# endif

# define MAXBITCHUNK    ((size_t)1 << (sizeof(size_t) * 8 - 4))
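
/*
 * MAXBITCHUNK is 2^(bits(size_t)-4) bytes: 2^60 with a 64-bit size_t and
 * 2^28 with a 32-bit one.  aes_cfb1_cipher() below processes at most this
 * many bytes per library call so that the bit count it passes on (len * 8)
 * cannot overflow a size_t.
 */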

# ifdef VPAES_ASM
int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);
int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);

void vpaes_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void vpaes_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);

void vpaes_cbc_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key, unsigned char *ivec, int enc);
# endif
# ifdef BSAES_ASM
void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
                       size_t length, const AES_KEY *key,
                       unsigned char ivec[16], int enc);
void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
                                size_t len, const AES_KEY *key,
                                const unsigned char ivec[16]);
void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
# endif
# ifdef AES_CTR_ASM
void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const AES_KEY *key,
                       const unsigned char ivec[AES_BLOCK_SIZE]);
# endif
# ifdef AES_XTS_ASM
void AES_xts_encrypt(const char *inp, char *out, size_t len,
                     const AES_KEY *key1, const AES_KEY *key2,
                     const unsigned char iv[16]);
void AES_xts_decrypt(const char *inp, char *out, size_t len,
                     const AES_KEY *key1, const AES_KEY *key2,
                     const unsigned char iv[16]);
# endif

# if defined(OPENSSL_CPUID_OBJ) && (defined(__powerpc__) || defined(__ppc__) || defined(_ARCH_PPC))
#  include "ppc_arch.h"
#  ifdef VPAES_ASM
#   define VPAES_CAPABLE (OPENSSL_ppccap_P & PPC_ALTIVEC)
#  endif
#  define HWAES_CAPABLE  (OPENSSL_ppccap_P & PPC_CRYPTO207)
#  define HWAES_set_encrypt_key aes_p8_set_encrypt_key
#  define HWAES_set_decrypt_key aes_p8_set_decrypt_key
#  define HWAES_encrypt aes_p8_encrypt
#  define HWAES_decrypt aes_p8_decrypt
#  define HWAES_cbc_encrypt aes_p8_cbc_encrypt
#  define HWAES_ctr32_encrypt_blocks aes_p8_ctr32_encrypt_blocks
# endif

# if defined(AES_ASM) && !defined(I386_ONLY) && (  \
        ((defined(__i386) || defined(__i386__) || \
          defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2)) || \
        defined(__x86_64) || defined(__x86_64__) || \
        defined(_M_AMD64) || defined(_M_X64) || \
        defined(__INTEL__))

extern unsigned int OPENSSL_ia32cap_P[];

#  ifdef VPAES_ASM
#   define VPAES_CAPABLE   (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
#  endif
#  ifdef BSAES_ASM
#   define BSAES_CAPABLE   (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
#  endif
/*
 * AES-NI section
 */
#  define AESNI_CAPABLE   (OPENSSL_ia32cap_P[1]&(1<<(57-32)))
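
/*
 * These capability probes test single CPUID feature bits: word 1 of
 * OPENSSL_ia32cap_P holds the ECX flags of CPUID leaf 1, so bit
 * (57-32) == 25 is AES-NI, while bit (41-32) == 9 is SSSE3, the baseline
 * for both the vector-permutation (VPAES) and bit-sliced (BSAES) code.
 */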

int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);
int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);

void aesni_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void aesni_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);

void aesni_ecb_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length, const AES_KEY *key, int enc);
void aesni_cbc_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key, unsigned char *ivec, int enc);

void aesni_ctr32_encrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key, const unsigned char *ivec);

void aesni_xts_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key1, const AES_KEY *key2,
                       const unsigned char iv[16]);

void aesni_xts_decrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key1, const AES_KEY *key2,
                       const unsigned char iv[16]);

void aesni_ccm64_encrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key,
                                const unsigned char ivec[16],
                                unsigned char cmac[16]);

void aesni_ccm64_decrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key,
                                const unsigned char ivec[16],
                                unsigned char cmac[16]);

#  if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
size_t aesni_gcm_encrypt(const unsigned char *in,
                         unsigned char *out,
                         size_t len,
                         const void *key, unsigned char ivec[16], u64 *Xi);
#   define AES_gcm_encrypt aesni_gcm_encrypt
size_t aesni_gcm_decrypt(const unsigned char *in,
                         unsigned char *out,
                         size_t len,
                         const void *key, unsigned char ivec[16], u64 *Xi);
#   define AES_gcm_decrypt aesni_gcm_decrypt
void gcm_ghash_avx(u64 Xi[2], const u128 Htable[16], const u8 *in,
                   size_t len);
#   define AES_GCM_ASM(gctx)  (gctx->ctr==aesni_ctr32_encrypt_blocks && \
                               gctx->gcm.ghash==gcm_ghash_avx)
#   define AES_GCM_ASM2(gctx) (gctx->gcm.block==(block128_f)aesni_encrypt && \
                               gctx->gcm.ghash==gcm_ghash_avx)
#   undef AES_GCM_ASM2        /* minor size optimization */
#  endif

static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                          const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        ret = aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) aesni_decrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) aesni_cbc_encrypt : NULL;
    } else {
        ret = aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) aesni_encrypt;
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
        else if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        else
            dat->stream.cbc = NULL;
    }

    if (ret < 0) {
        EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}

static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY, ctx)->ks.ks,
                      EVP_CIPHER_CTX_iv_noconst(ctx),
                      EVP_CIPHER_CTX_encrypting(ctx));

    return 1;
}

static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    size_t bl = EVP_CIPHER_CTX_block_size(ctx);

    if (len < bl)
        return 1;

    aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY, ctx)->ks.ks,
                      EVP_CIPHER_CTX_encrypting(ctx));

    return 1;
}

# define aesni_ofb_cipher aes_ofb_cipher
static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb_cipher aes_cfb_cipher
static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb8_cipher aes_cfb8_cipher
static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_cfb1_cipher aes_cfb1_cipher
static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_ctr_cipher aes_ctr_cipher
static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX, ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                              &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
        gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        /*
         * If we have an IV, we can set it directly; otherwise use the
         * saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If the key is set, use the IV directly; otherwise save a copy. */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

# define aesni_gcm_cipher aes_gcm_cipher
static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, ctx);
    if (!iv && !key)
        return 1;

    if (key) {
        /* The context key length covers two AES keys */
        if (enc) {
            aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                  &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_encrypt;
            xctx->stream = aesni_xts_encrypt;
        } else {
            aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                  &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_decrypt;
            xctx->stream = aesni_xts_decrypt;
        }

        aesni_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
                              EVP_CIPHER_CTX_key_length(ctx) * 4,
                              &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aesni_encrypt;

        xctx->xts.key1 = &xctx->ks1;
    }

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
    }

    return 1;
}

# define aesni_xts_cipher aes_xts_cipher
static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX, ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                              &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aesni_encrypt);
        cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
            (ccm128_f) aesni_ccm64_decrypt_blocks;
        cctx->key_set = 1;
    }
    if (iv) {
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}

# define aesni_ccm_cipher aes_ccm_cipher
static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# ifndef OPENSSL_NO_OCB
void aesni_ocb_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
void aesni_ocb_decrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);

static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX, ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise this by not setting
             * the decrypt key for an encrypt-only operation.
             */
            aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &octx->ksenc.ks);
            aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) aesni_encrypt,
                                    (block128_f) aesni_decrypt,
                                    enc ? aesni_ocb_encrypt
                                        : aesni_ocb_decrypt))
                return 0;
        } while (0);

        /*
         * If we have an IV, we can set it directly; otherwise use the
         * saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If the key is set, use the IV directly; otherwise save a copy. */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}

# define aesni_ocb_cipher aes_ocb_cipher
static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
# endif                        /* OPENSSL_NO_OCB */

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aesni_init_key, \
        aesni_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize, \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_init_key, \
        aes_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
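
/*
 * For illustration, BLOCK_CIPHER_generic(NID_aes,128,16,16,cbc,cbc,CBC,...)
 * expands to two cipher tables, aesni_128_cbc and aes_128_cbc, plus a
 * selector roughly equivalent to:
 *
 *     const EVP_CIPHER *EVP_aes_128_cbc(void)
 *     {
 *         return AESNI_CAPABLE ? &aesni_128_cbc : &aes_128_cbc;
 *     }
 *
 * so the AES-NI path is chosen once, at cipher-lookup time, rather than
 * on every call.
 */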

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aesni_##mode##_init_key, \
        aesni_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }

# elif defined(AES_ASM) && (defined(__sparc) || defined(__sparc__))

#  include "sparc_arch.h"

extern unsigned int OPENSSL_sparcv9cap_P[];

#  define SPARC_AES_CAPABLE       (OPENSSL_sparcv9cap_P[1] & CFR_AES)

void aes_t4_set_encrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
void aes_t4_set_decrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
void aes_t4_encrypt(const unsigned char *in, unsigned char *out,
                    const AES_KEY *key);
void aes_t4_decrypt(const unsigned char *in, unsigned char *out,
                    const AES_KEY *key);
/*
 * Key-length-specific subroutines were chosen for the following reason.
 * Each SPARC T4 core can execute up to 8 threads which share the core's
 * resources.  Loading as much key material as possible into registers
 * minimizes references to the shared memory interface, as well as the
 * number of instructions in the inner loops [much needed on T4].  Having
 * non-key-length-specific routines would instead require conditional
 * branches either in the inner loops or on the subroutines' entries.  The
 * former is hardly acceptable, while the latter grows the code to roughly
 * the size occupied by multiple key-length-specific subroutines anyway,
 * so why fight it?
 */
void aes128_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes128_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes192_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes192_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes256_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes256_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes128_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes192_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes256_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes128_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes128_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);

static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                           const unsigned char *iv, int enc)
{
    int ret, mode, bits;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        ret = 0;
        aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_decrypt;
        switch (bits) {
        case 128:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes128_t4_cbc_decrypt : NULL;
            break;
        case 192:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes192_t4_cbc_decrypt : NULL;
            break;
        case 256:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes256_t4_cbc_decrypt : NULL;
            break;
        default:
            ret = -1;
        }
    } else {
        ret = 0;
        aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_encrypt;
        switch (bits) {
        case 128:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        case 192:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        case 256:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        default:
            ret = -1;
        }
    }

    if (ret < 0) {
        EVPerr(EVP_F_AES_T4_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}

# define aes_t4_cbc_cipher aes_cbc_cipher
static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_ecb_cipher aes_ecb_cipher
static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_ofb_cipher aes_ofb_cipher
static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_cfb_cipher aes_cfb_cipher
static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_cfb8_cipher aes_cfb8_cipher
static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

# define aes_t4_cfb1_cipher aes_cfb1_cipher
static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

# define aes_t4_ctr_cipher aes_ctr_cipher
static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX, ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
        aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                           (block128_f) aes_t4_encrypt);
        switch (bits) {
        case 128:
            gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            break;
        case 192:
            gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            break;
        case 256:
            gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
            break;
        default:
            return 0;
        }
        /*
         * If we have an IV, we can set it directly; otherwise use the
         * saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If the key is set, use the IV directly; otherwise save a copy. */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

# define aes_t4_gcm_cipher aes_gcm_cipher
static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, ctx);
    if (!iv && !key)
        return 1;

    if (key) {
        int bits = EVP_CIPHER_CTX_key_length(ctx) * 4;
        xctx->stream = NULL;
        /* The context key length covers two AES keys */
        if (enc) {
            aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_encrypt;
            switch (bits) {
            case 128:
                xctx->stream = aes128_t4_xts_encrypt;
                break;
            case 256:
                xctx->stream = aes256_t4_xts_encrypt;
                break;
            default:
                return 0;
            }
        } else {
            aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                   &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_decrypt;
            switch (bits) {
            case 128:
                xctx->stream = aes128_t4_xts_decrypt;
                break;
            case 256:
                xctx->stream = aes256_t4_xts_decrypt;
                break;
            default:
                return 0;
            }
        }

        aes_t4_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
                               EVP_CIPHER_CTX_key_length(ctx) * 4,
                               &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aes_t4_encrypt;

        xctx->xts.key1 = &xctx->ks1;
    }

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
    }

    return 1;
}

# define aes_t4_xts_cipher aes_xts_cipher
static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX, ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
        aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aes_t4_encrypt);
        cctx->str = NULL;
        cctx->key_set = 1;
    }
    if (iv) {
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}

# define aes_t4_ccm_cipher aes_ccm_cipher
static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# ifndef OPENSSL_NO_OCB
static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX, ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise this by not setting
             * the decrypt key for an encrypt-only operation.
             */
            aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                   &octx->ksenc.ks);
            aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                   &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) aes_t4_encrypt,
                                    (block128_f) aes_t4_decrypt,
                                    NULL))
                return 0;
        } while (0);

        /*
         * If we have an IV, we can set it directly; otherwise use the
         * saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If the key is set, use the IV directly; otherwise save a copy. */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}

# define aes_t4_ocb_cipher aes_ocb_cipher
static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# endif                        /* OPENSSL_NO_OCB */

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_t4_init_key, \
        aes_t4_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize, \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_init_key, \
        aes_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_t4_##mode##_init_key, \
        aes_t4_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }

# else

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_init_key, \
        aes_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }

# endif

# if defined(OPENSSL_CPUID_OBJ) && (defined(__arm__) || defined(__arm) || defined(__aarch64__))
#  include "arm_arch.h"
#  if __ARM_MAX_ARCH__ >= 7
#   if defined(BSAES_ASM)
#    define BSAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
#   endif
#   if defined(VPAES_ASM)
#    define VPAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
#   endif
#   define HWAES_CAPABLE (OPENSSL_armcap_P & ARMV8_AES)
#   define HWAES_set_encrypt_key aes_v8_set_encrypt_key
#   define HWAES_set_decrypt_key aes_v8_set_decrypt_key
#   define HWAES_encrypt aes_v8_encrypt
#   define HWAES_decrypt aes_v8_decrypt
#   define HWAES_cbc_encrypt aes_v8_cbc_encrypt
#   define HWAES_ctr32_encrypt_blocks aes_v8_ctr32_encrypt_blocks
#  endif
# endif

# if defined(HWAES_CAPABLE)
int HWAES_set_encrypt_key(const unsigned char *userKey, const int bits,
                          AES_KEY *key);
int HWAES_set_decrypt_key(const unsigned char *userKey, const int bits,
                          AES_KEY *key);
void HWAES_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void HWAES_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void HWAES_cbc_encrypt(const unsigned char *in, unsigned char *out,
                       size_t length, const AES_KEY *key,
                       unsigned char *ivec, const int enc);
void HWAES_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
                                size_t len, const AES_KEY *key,
                                const unsigned char ivec[16]);
# endif

# define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \
        BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
        BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
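
/*
 * BLOCK_CIPHER_generic_pack() stamps out BLOCK_CIPHER_generic() for all
 * seven generic modes of one key length: CBC, ECB, OFB128, CFB128, CFB1,
 * CFB8 and CTR.  The three invocations after the cipher functions below
 * therefore define the complete EVP_aes_{128,192,256}_* non-AEAD set.
 */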

static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                        const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc)
# ifdef HWAES_CAPABLE
        if (HWAES_CAPABLE) {
            ret = HWAES_set_decrypt_key(key,
                                        EVP_CIPHER_CTX_key_length(ctx) * 8,
                                        &dat->ks.ks);
            dat->block = (block128_f) HWAES_decrypt;
            dat->stream.cbc = NULL;
#  ifdef HWAES_cbc_encrypt
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
#  endif
        } else
# endif
# ifdef BSAES_CAPABLE
        if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
            ret = AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &dat->ks.ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
        } else
# endif
# ifdef VPAES_CAPABLE
        if (VPAES_CAPABLE) {
            ret = vpaes_set_decrypt_key(key,
                                        EVP_CIPHER_CTX_key_length(ctx) * 8,
                                        &dat->ks.ks);
            dat->block = (block128_f) vpaes_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) vpaes_cbc_encrypt : NULL;
        } else
# endif
        {
            ret = AES_set_decrypt_key(key,
                                      EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &dat->ks.ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) AES_cbc_encrypt : NULL;
        } else
# ifdef HWAES_CAPABLE
        if (HWAES_CAPABLE) {
            ret = HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                        &dat->ks.ks);
            dat->block = (block128_f) HWAES_encrypt;
            dat->stream.cbc = NULL;
#  ifdef HWAES_cbc_encrypt
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
            else
#  endif
#  ifdef HWAES_ctr32_encrypt_blocks
            if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
            else
#  endif
                (void)0;        /* terminate potentially open 'else' */
        } else
# endif
# ifdef BSAES_CAPABLE
        if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
            ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &dat->ks.ks);
            dat->block = (block128_f) AES_encrypt;
            dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
        } else
# endif
# ifdef VPAES_CAPABLE
        if (VPAES_CAPABLE) {
            ret = vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                        &dat->ks.ks);
            dat->block = (block128_f) vpaes_encrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) vpaes_cbc_encrypt : NULL;
        } else
# endif
        {
            ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &dat->ks.ks);
            dat->block = (block128_f) AES_encrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) AES_cbc_encrypt : NULL;
# ifdef AES_CTR_ASM
            if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
# endif
        }

    if (ret < 0) {
        EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}

static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);

    if (dat->stream.cbc)
        (*dat->stream.cbc) (in, out, len, &dat->ks,
                            EVP_CIPHER_CTX_iv_noconst(ctx),
                            EVP_CIPHER_CTX_encrypting(ctx));
    else if (EVP_CIPHER_CTX_encrypting(ctx))
        CRYPTO_cbc128_encrypt(in, out, len, &dat->ks,
                              EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
    else
        CRYPTO_cbc128_decrypt(in, out, len, &dat->ks,
                              EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);

    return 1;
}

static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    size_t bl = EVP_CIPHER_CTX_block_size(ctx);
    size_t i;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);

    if (len < bl)
        return 1;

    for (i = 0, len -= bl; i <= len; i += bl)
        (*dat->block) (in + i, out + i, &dat->ks);

    return 1;
}
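
/*
 * Note the loop idiom in aes_ecb_cipher() above: after "len -= bl" the
 * condition "i <= len" walks exactly len/bl complete blocks; any trailing
 * partial block is expected to be buffered by the generic EVP update
 * layer rather than handled here.
 */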

static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);

    int num = EVP_CIPHER_CTX_num(ctx);
    CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
                          EVP_CIPHER_CTX_iv_noconst(ctx), &num, dat->block);
    EVP_CIPHER_CTX_set_num(ctx, num);
    return 1;
}

static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);

    int num = EVP_CIPHER_CTX_num(ctx);
    CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
                          EVP_CIPHER_CTX_iv_noconst(ctx), &num,
                          EVP_CIPHER_CTX_encrypting(ctx), dat->block);
    EVP_CIPHER_CTX_set_num(ctx, num);
    return 1;
}

static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);

    int num = EVP_CIPHER_CTX_num(ctx);
    CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
                            EVP_CIPHER_CTX_iv_noconst(ctx), &num,
                            EVP_CIPHER_CTX_encrypting(ctx), dat->block);
    EVP_CIPHER_CTX_set_num(ctx, num);
    return 1;
}

static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);

    if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
        int num = EVP_CIPHER_CTX_num(ctx);
        CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
                                EVP_CIPHER_CTX_iv_noconst(ctx), &num,
                                EVP_CIPHER_CTX_encrypting(ctx), dat->block);
        EVP_CIPHER_CTX_set_num(ctx, num);
        return 1;
    }

    while (len >= MAXBITCHUNK) {
        int num = EVP_CIPHER_CTX_num(ctx);
        CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
                                EVP_CIPHER_CTX_iv_noconst(ctx), &num,
                                EVP_CIPHER_CTX_encrypting(ctx), dat->block);
        EVP_CIPHER_CTX_set_num(ctx, num);
        /* Advance the buffers past the chunk just processed */
        in += MAXBITCHUNK;
        out += MAXBITCHUNK;
        len -= MAXBITCHUNK;
    }
    if (len) {
        int num = EVP_CIPHER_CTX_num(ctx);
        CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
                                EVP_CIPHER_CTX_iv_noconst(ctx), &num,
                                EVP_CIPHER_CTX_encrypting(ctx), dat->block);
        EVP_CIPHER_CTX_set_num(ctx, num);
    }

    return 1;
}

static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    unsigned int num = EVP_CIPHER_CTX_num(ctx);
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);

    if (dat->stream.ctr)
        CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
                                    EVP_CIPHER_CTX_iv_noconst(ctx),
                                    EVP_CIPHER_CTX_buf_noconst(ctx),
                                    &num, dat->stream.ctr);
    else
        CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
                              EVP_CIPHER_CTX_iv_noconst(ctx),
                              EVP_CIPHER_CTX_buf_noconst(ctx), &num,
                              dat->block);
    EVP_CIPHER_CTX_set_num(ctx, num);
    return 1;
}
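
/*
 * aes_ctr_cipher() above prefers an accelerated stream routine when the
 * init function installed one: CRYPTO_ctr128_encrypt_ctr32() then drives
 * it while taking care of the wrap of the low 32 bits of the big-endian
 * counter; otherwise CRYPTO_ctr128_encrypt() falls back to one block
 * cipher call per 16-byte block.
 */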

BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)

static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX, c);
    OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
    if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
        OPENSSL_free(gctx->iv);
    return 1;
}

/* increment counter (64-bit int) by 1 */
static void ctr64_inc(unsigned char *counter)
{
    int n = 8;
    unsigned char c;

    do {
        --n;
        c = counter[n];
        ++c;
        counter[n] = c;
        if (c)
            return;
    } while (n);
}
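
/*
 * Example: treating its argument as a 64-bit big-endian integer,
 * ctr64_inc() turns { ..., 0x01, 0xff } into { ..., 0x02, 0x00 }; the
 * early return on a non-zero byte stops the carry from rippling further
 * than necessary.
 */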

static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX, c);
    switch (type) {
    case EVP_CTRL_INIT:
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = EVP_CIPHER_CTX_iv_length(c);
        gctx->iv = EVP_CIPHER_CTX_iv_noconst(c);
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        if (arg <= 0)
            return 0;
        /* Allocate memory for IV if needed */
        if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
            if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
                OPENSSL_free(gctx->iv);
            gctx->iv = OPENSSL_malloc(arg);
            if (gctx->iv == NULL)
                return 0;
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if (arg <= 0 || arg > 16 || EVP_CIPHER_CTX_encrypting(c))
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (arg <= 0 || arg > 16 || !EVP_CIPHER_CTX_encrypting(c)
            || gctx->taglen < 0)
            return 0;
        memcpy(ptr, EVP_CIPHER_CTX_buf_noconst(c), arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole IV */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at
         * least 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;
        if (arg)
            memcpy(gctx->iv, ptr, arg);
        if (EVP_CIPHER_CTX_encrypting(c)
            && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;
        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * The invocation field will be at least 8 bytes in size, so there
         * is no need to check for wraparound or to increment more than the
         * last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        if (gctx->iv_gen == 0 || gctx->key_set == 0
            || EVP_CIPHER_CTX_encrypting(c))
            return 0;
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        gctx->tls_aad_len = arg;
        {
            unsigned int len =
                EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
            /* Correct length for explicit IV */
            len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting, correct for the tag too */
            if (!EVP_CIPHER_CTX_encrypting(c))
                len -= EVP_GCM_TLS_TAG_LEN;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX, out);
            if (gctx->gcm.key) {
                if (gctx->gcm.key != &gctx->ks)
                    return 0;
                gctx_out->gcm.key = &gctx_out->ks;
            }
            if (gctx->iv == EVP_CIPHER_CTX_iv_noconst(c))
                gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
            else {
                gctx_out->iv = OPENSSL_malloc(gctx->ivlen);
                if (gctx_out->iv == NULL)
                    return 0;
                memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
            }
            return 1;
        }

    default:
        return -1;

    }
}

static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX, ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
# ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) HWAES_encrypt);
#  ifdef HWAES_ctr32_encrypt_blocks
                gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
#  else
                gctx->ctr = NULL;
#  endif
                break;
            } else
# endif
# ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE) {
                AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) AES_encrypt);
                gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
                break;
            } else
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) vpaes_encrypt);
                gctx->ctr = NULL;
                break;
            } else
# endif
                (void)0;        /* terminate potentially open 'else' */

            AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &gctx->ks.ks);
            CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                               (block128_f) AES_encrypt);
# ifdef AES_CTR_ASM
            gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
# else
            gctx->ctr = NULL;
# endif
        } while (0);

        /*
         * If we have an IV, we can set it directly; otherwise use the
         * saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If the key is set, use the IV directly; otherwise save a copy. */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

/*
 * Handle the TLS GCM packet format. This consists of the last portion of
 * the IV followed by the payload and finally the tag. On encrypt we
 * generate the IV, encrypt the payload and write the tag. On decrypt we
 * retrieve the IV, decrypt the payload and verify the tag.
 */
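
/*
 * The record payload handled below is laid out as
 *
 *     | explicit IV | ciphertext | tag |
 *        8 bytes                  16 bytes
 *
 * with the two lengths given by EVP_GCM_TLS_EXPLICIT_IV_LEN and
 * EVP_GCM_TLS_TAG_LEN (8 and 16 for the TLS AES-GCM cipher suites).
 */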
1532
1533 static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1534 const unsigned char *in, size_t len)
1535 {
1536 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
1537 int rv = -1;
1538 /* Encrypt/decrypt must be performed in place */
1539 if (out != in
1540 || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
1541 return -1;
1542 /*
1543 * Set IV from start of buffer or generate IV and write to start of
1544 * buffer.
1545 */
1546 if (EVP_CIPHER_CTX_ctrl(ctx, EVP_CIPHER_CTX_encrypting(ctx) ?
1547 EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
1548 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
1549 goto err;
1550 /* Use saved AAD */
1551 if (CRYPTO_gcm128_aad(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
1552 gctx->tls_aad_len))
1553 goto err;
1554 /* Fix buffer and length to point to payload */
1555 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1556 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1557 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
1558 if (EVP_CIPHER_CTX_encrypting(ctx)) {
1559 /* Encrypt payload */
1560 if (gctx->ctr) {
1561 size_t bulk = 0;
1562 # if defined(AES_GCM_ASM)
1563 if (len >= 32 && AES_GCM_ASM(gctx)) {
1564 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
1565 return -1;
1566
1567 bulk = AES_gcm_encrypt(in, out, len,
1568 gctx->gcm.key,
1569 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
1570 gctx->gcm.len.u[1] += bulk;
1571 }
1572 # endif
1573 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
1574 in + bulk,
1575 out + bulk,
1576 len - bulk, gctx->ctr))
1577 goto err;
1578 } else {
1579 size_t bulk = 0;
1580 # if defined(AES_GCM_ASM2)
1581 if (len >= 32 && AES_GCM_ASM2(gctx)) {
1582 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
1583 return -1;
1584
1585 bulk = AES_gcm_encrypt(in, out, len,
1586 gctx->gcm.key,
1587 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
1588 gctx->gcm.len.u[1] += bulk;
1589 }
1590 # endif
1591 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
1592 in + bulk, out + bulk, len - bulk))
1593 goto err;
1594 }
1595 out += len;
1596 /* Finally write tag */
1597 CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
1598 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
1599 } else {
1600 /* Decrypt */
1601 if (gctx->ctr) {
1602 size_t bulk = 0;
1603 # if defined(AES_GCM_ASM)
1604 if (len >= 16 && AES_GCM_ASM(gctx)) {
1605 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
1606 return -1;
1607
1608 bulk = AES_gcm_decrypt(in, out, len,
1609 gctx->gcm.key,
1610 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
1611 gctx->gcm.len.u[1] += bulk;
1612 }
1613 # endif
1614 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
1615 in + bulk,
1616 out + bulk,
1617 len - bulk, gctx->ctr))
1618 goto err;
1619 } else {
1620 size_t bulk = 0;
1621 # if defined(AES_GCM_ASM2)
1622 if (len >= 16 && AES_GCM_ASM2(gctx)) {
1623 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
1624 return -1;
1625
1626 bulk = AES_gcm_decrypt(in, out, len,
1627 gctx->gcm.key,
1628 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
1629 gctx->gcm.len.u[1] += bulk;
1630 }
1631 # endif
1632 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
1633 in + bulk, out + bulk, len - bulk))
1634 goto err;
1635 }
1636 /* Retrieve tag */
1637 CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
1638 EVP_GCM_TLS_TAG_LEN);
1639 /* If tag mismatch wipe buffer */
1640 if (CRYPTO_memcmp(EVP_CIPHER_CTX_buf_noconst(ctx), in + len,
1641 EVP_GCM_TLS_TAG_LEN)) {
1642 OPENSSL_cleanse(out, len);
1643 goto err;
1644 }
1645 rv = len;
1646 }
1647
1648 err:
1649 gctx->iv_set = 0;
1650 gctx->tls_aad_len = -1;
1651 return rv;
1652 }
1653
1654 static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1655 const unsigned char *in, size_t len)
1656 {
1657 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
1658 /* If not set up, return error */
1659 if (!gctx->key_set)
1660 return -1;
1661
1662 if (gctx->tls_aad_len >= 0)
1663 return aes_gcm_tls_cipher(ctx, out, in, len);
1664
1665 if (!gctx->iv_set)
1666 return -1;
1667 if (in) {
1668 if (out == NULL) {
1669 if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
1670 return -1;
1671 } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
1672 if (gctx->ctr) {
1673 size_t bulk = 0;
1674 # if defined(AES_GCM_ASM)
1675 if (len >= 32 && AES_GCM_ASM(gctx)) {
1676 size_t res = (16 - gctx->gcm.mres) % 16;
1677
                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_encrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key, gctx->gcm.Yi.c,
                                           gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
# endif
                if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                                in + bulk,
                                                out + bulk,
                                                len - bulk, gctx->ctr))
                    return -1;
            } else {
                size_t bulk = 0;
# if defined(AES_GCM_ASM2)
                if (len >= 32 && AES_GCM_ASM2(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_encrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key, gctx->gcm.Yi.c,
                                           gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
# endif
                if (CRYPTO_gcm128_encrypt(&gctx->gcm,
                                          in + bulk, out + bulk, len - bulk))
                    return -1;
            }
        } else {
            if (gctx->ctr) {
                size_t bulk = 0;
# if defined(AES_GCM_ASM)
                if (len >= 16 && AES_GCM_ASM(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_decrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key,
                                           gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
# endif
                if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                                in + bulk,
                                                out + bulk,
                                                len - bulk, gctx->ctr))
                    return -1;
            } else {
                size_t bulk = 0;
# if defined(AES_GCM_ASM2)
                if (len >= 16 && AES_GCM_ASM2(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_decrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key,
                                           gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
# endif
                if (CRYPTO_gcm128_decrypt(&gctx->gcm,
                                          in + bulk, out + bulk, len - bulk))
                    return -1;
            }
        }
        return len;
    } else {
        if (!EVP_CIPHER_CTX_encrypting(ctx)) {
            if (gctx->taglen < 0)
                return -1;
            if (CRYPTO_gcm128_finish(&gctx->gcm,
                                     EVP_CIPHER_CTX_buf_noconst(ctx),
                                     gctx->taglen) != 0)
                return -1;
            gctx->iv_set = 0;
            return 0;
        }
        CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx), 16);
        gctx->taglen = 16;
        /* Don't reuse the IV */
        gctx->iv_set = 0;
        return 0;
    }
}

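/*-
 * A minimal usage sketch for the GCM ciphers defined below (illustrative
 * only, not part of this translation unit). It shows how aes_gcm_cipher()
 * above is driven through the public EVP interface: an update call with a
 * NULL output buffer supplies AAD, EVP_EncryptFinal_ex() computes the tag
 * and EVP_CTRL_AEAD_GET_TAG retrieves it. Buffer names are placeholders
 * and error checking is omitted.
 *
 *      EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *      int outl, ct_len;
 *
 *      EVP_EncryptInit_ex(c, EVP_aes_128_gcm(), NULL, key, iv);
 *      EVP_EncryptUpdate(c, NULL, &outl, aad, aad_len);      (AAD pass)
 *      EVP_EncryptUpdate(c, ct, &outl, pt, pt_len);
 *      ct_len = outl;
 *      EVP_EncryptFinal_ex(c, ct + ct_len, &outl);           (computes tag)
 *      ct_len += outl;
 *      EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
 *      EVP_CIPHER_CTX_free(c);
 */
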
# define CUSTOM_FLAGS    (EVP_CIPH_FLAG_DEFAULT_ASN1 \
                | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                | EVP_CIPH_CUSTOM_COPY)

BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)

static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,c);
    if (type == EVP_CTRL_COPY) {
        EVP_CIPHER_CTX *out = ptr;
        EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out);
        if (xctx->xts.key1) {
            if (xctx->xts.key1 != &xctx->ks1)
                return 0;
            xctx_out->xts.key1 = &xctx_out->ks1;
        }
        if (xctx->xts.key2) {
            if (xctx->xts.key2 != &xctx->ks2)
                return 0;
            xctx_out->xts.key2 = &xctx_out->ks2;
        }
        return 1;
    } else if (type != EVP_CTRL_INIT)
        return -1;
    /* key1 and key2 are used as an indicator that both key and IV are set */
    xctx->xts.key1 = NULL;
    xctx->xts.key2 = NULL;
    return 1;
}

static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
    if (!iv && !key)
        return 1;

    if (key)
        do {
# ifdef AES_XTS_ASM
            xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
# else
            xctx->stream = NULL;
# endif
            /*
             * key_len covers two AES keys, so "* 4" converts half of the
             * byte count into the per-key size in bits
             */
# ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                if (enc) {
                    HWAES_set_encrypt_key(key,
                                          EVP_CIPHER_CTX_key_length(ctx) * 4,
                                          &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) HWAES_encrypt;
                } else {
                    HWAES_set_decrypt_key(key,
                                          EVP_CIPHER_CTX_key_length(ctx) * 4,
                                          &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) HWAES_decrypt;
                }

                HWAES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
                                      EVP_CIPHER_CTX_key_length(ctx) * 4,
                                      &xctx->ks2.ks);
                xctx->xts.block2 = (block128_f) HWAES_encrypt;

                xctx->xts.key1 = &xctx->ks1;
                break;
            } else
# endif
# ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE)
                xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
            else
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                if (enc) {
                    vpaes_set_encrypt_key(key,
                                          EVP_CIPHER_CTX_key_length(ctx) * 4,
                                          &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) vpaes_encrypt;
                } else {
                    vpaes_set_decrypt_key(key,
                                          EVP_CIPHER_CTX_key_length(ctx) * 4,
                                          &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) vpaes_decrypt;
                }

                vpaes_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
                                      EVP_CIPHER_CTX_key_length(ctx) * 4,
                                      &xctx->ks2.ks);
                xctx->xts.block2 = (block128_f) vpaes_encrypt;

                xctx->xts.key1 = &xctx->ks1;
                break;
            } else
# endif
                (void)0;        /* terminate potentially open 'else' */

            if (enc) {
                AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                    &xctx->ks1.ks);
                xctx->xts.block1 = (block128_f) AES_encrypt;
            } else {
                AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                    &xctx->ks1.ks);
                xctx->xts.block1 = (block128_f) AES_decrypt;
            }

            AES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
                                EVP_CIPHER_CTX_key_length(ctx) * 4,
                                &xctx->ks2.ks);
            xctx->xts.block2 = (block128_f) AES_encrypt;

            xctx->xts.key1 = &xctx->ks1;
        } while (0);

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
    }

    return 1;
}

static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
    if (!xctx->xts.key1 || !xctx->xts.key2)
        return 0;
    if (!out || !in || len < AES_BLOCK_SIZE)
        return 0;
    if (xctx->stream)
        (*xctx->stream) (in, out, len,
                         xctx->xts.key1, xctx->xts.key2,
                         EVP_CIPHER_CTX_iv_noconst(ctx));
    else if (CRYPTO_xts128_encrypt(&xctx->xts, EVP_CIPHER_CTX_iv_noconst(ctx),
                                   in, out, len,
                                   EVP_CIPHER_CTX_encrypting(ctx)))
        return 0;
    return 1;
}

# define aes_xts_cleanup NULL

# define XTS_FLAGS      (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
                         | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                         | EVP_CIPH_CUSTOM_COPY)

BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)

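/*-
 * A usage sketch for the XTS ciphers above (illustrative only): the EVP
 * key is the concatenation of the two XTS keys, so EVP_aes_256_xts()
 * expects 64 bytes of key material, and the 16-byte IV carries the tweak
 * (typically the sector or block number). Note that aes_xts_cipher()
 * rejects inputs shorter than one AES block. Names are placeholders and
 * error checking is omitted.
 *
 *      EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *      int outl;
 *
 *      EVP_EncryptInit_ex(c, EVP_aes_256_xts(), NULL, key64, tweak16);
 *      EVP_EncryptUpdate(c, ct, &outl, pt, pt_len);          (pt_len >= 16)
 *      EVP_CIPHER_CTX_free(c);
 */
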
static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c);
    switch (type) {
    case EVP_CTRL_INIT:
        cctx->key_set = 0;
        cctx->iv_set = 0;
        cctx->L = 8;
        cctx->M = 12;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        cctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        cctx->tls_aad_len = arg;
        {
            uint16_t len =
                EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
            /* Correct length for explicit IV */
            len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting, correct for tag too */
            if (!EVP_CIPHER_CTX_encrypting(c))
                len -= cctx->M;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return cctx->M;

    case EVP_CTRL_CCM_SET_IV_FIXED:
        /* Sanity check length */
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
            return 0;
        /* Just copy to first part of IV */
        memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        arg = 15 - arg;
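        /* fall thru: the IV (nonce) length and the L value sum to 15 */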
    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
            return 0;
        cctx->L = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;
        if (EVP_CIPHER_CTX_encrypting(c) && ptr)
            return 0;
        if (ptr) {
            cctx->tag_set = 1;
            memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        }
        cctx->M = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
            return 0;
        if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
            return 0;
        cctx->tag_set = 0;
        cctx->iv_set = 0;
        cctx->len_set = 0;
        return 1;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out);
            if (cctx->ccm.key) {
                if (cctx->ccm.key != &cctx->ks)
                    return 0;
                cctx_out->ccm.key = &cctx_out->ks;
            }
            return 1;
        }

    default:
        return -1;
    }
}

static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key)
        do {
# ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &cctx->ks.ks);

                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                                   &cctx->ks, (block128_f) HWAES_encrypt);
                cctx->str = NULL;
                cctx->key_set = 1;
                break;
            } else
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &cctx->ks.ks);
                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                                   &cctx->ks, (block128_f) vpaes_encrypt);
                cctx->str = NULL;
                cctx->key_set = 1;
                break;
            }
# endif
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &cctx->ks.ks);
            CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                               &cctx->ks, (block128_f) AES_encrypt);
            cctx->str = NULL;
            cctx->key_set = 1;
        } while (0);
    if (iv) {
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}

static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;
    /* Encrypt/decrypt must be performed in place */
    if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
        return -1;
    /* If encrypting, set explicit IV from sequence number (start of AAD) */
    if (EVP_CIPHER_CTX_encrypting(ctx))
        memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
               EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Get rest of IV from explicit IV */
    memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in,
           EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Correct length value */
    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L,
                            len))
        return -1;
    /* Use saved AAD */
    CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
    /* Fix buffer to point to payload */
    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
                                                    cctx->str) :
            CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
            return -1;
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    } else {
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, in + len, cctx->M))
                    return len;
            }
        }
        OPENSSL_cleanse(out, len);
        return -1;
    }
}

static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;
    /* If not set up, return error */
    if (!cctx->key_set)
        return -1;

    if (cctx->tls_aad_len >= 0)
        return aes_ccm_tls_cipher(ctx, out, in, len);

    if (!cctx->iv_set)
        return -1;

    if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
        return -1;
    if (!out) {
        if (!in) {
            if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
                                    15 - cctx->L, len))
                return -1;
            cctx->len_set = 1;
            return len;
        }
        /* If we have AAD, we need the message length first */
        if (!cctx->len_set && len)
            return -1;
        CRYPTO_ccm128_aad(ccm, in, len);
        return len;
    }
    /* EVP_*Final() doesn't return any data */
    if (!in)
        return 0;
    /* If the length is not set yet, set it now */
    if (!cctx->len_set) {
        if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
                                15 - cctx->L, len))
            return -1;
        cctx->len_set = 1;
    }
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
                                                    cctx->str) :
            CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        cctx->tag_set = 1;
        return len;
    } else {
        int rv = -1;
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
                                   cctx->M))
                    rv = len;
            }
        }
        if (rv == -1)
            OPENSSL_cleanse(out, len);
        cctx->iv_set = 0;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        return rv;
    }
}

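/*-
 * A usage sketch for the CCM ciphers defined below (illustrative only).
 * CCM needs the nonce and tag lengths up front, and the total plaintext
 * length must be supplied before any AAD, which is why aes_ccm_cipher()
 * above accepts a length-only update with both in and out NULL. Names
 * are placeholders and error checking is omitted.
 *
 *      EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *      int outl;
 *
 *      EVP_EncryptInit_ex(c, EVP_aes_128_ccm(), NULL, NULL, NULL);
 *      EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 11, NULL);
 *      EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_TAG, 12, NULL);
 *      EVP_EncryptInit_ex(c, NULL, NULL, key, nonce11);
 *      EVP_EncryptUpdate(c, NULL, &outl, NULL, pt_len);   (declare length)
 *      EVP_EncryptUpdate(c, NULL, &outl, aad, aad_len);   (AAD pass)
 *      EVP_EncryptUpdate(c, ct, &outl, pt, pt_len);
 *      EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 12, tag);
 *      EVP_CIPHER_CTX_free(c);
 */
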
# define aes_ccm_cleanup NULL

BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks;
    /* Pointer to the IV: doubles as the "IV has been set" indicator */
    unsigned char *iv;
} EVP_AES_WRAP_CTX;

static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                             const unsigned char *iv, int enc)
{
    EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        if (EVP_CIPHER_CTX_encrypting(ctx))
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &wctx->ks.ks);
        else
            AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &wctx->ks.ks);
        if (!iv)
            wctx->iv = NULL;
    }
    if (iv) {
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv,
               EVP_CIPHER_CTX_iv_length(ctx));
        wctx->iv = EVP_CIPHER_CTX_iv_noconst(ctx);
    }
    return 1;
}

static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t inlen)
{
    EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
    size_t rv;
    /* AES wrap with padding has IV length of 4, without padding 8 */
    int pad = EVP_CIPHER_CTX_iv_length(ctx) == 4;
    /* No final operation, so always return zero length */
    if (!in)
        return 0;
    /* Input length must always be non-zero */
    if (!inlen)
        return -1;
    /* If decrypting, we need at least 16 bytes and a multiple of 8 */
    if (!EVP_CIPHER_CTX_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
        return -1;
    /* If not padding, input must be a multiple of 8 */
    if (!pad && inlen & 0x7)
        return -1;
    if (!out) {
        if (EVP_CIPHER_CTX_encrypting(ctx)) {
            /* If padding, round up to a multiple of 8 */
            if (pad)
                inlen = (inlen + 7) / 8 * 8;
            /* 8 byte prefix */
            return inlen + 8;
        } else {
            /*
             * If not padding, the output will be exactly 8 bytes smaller
             * than the input. If padding, it will be at least 8 bytes
             * smaller, but we don't know by how much.
             */
            return inlen - 8;
        }
    }
    if (pad) {
        if (EVP_CIPHER_CTX_encrypting(ctx))
            rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
                                     out, in, inlen,
                                     (block128_f) AES_encrypt);
        else
            rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
                                       out, in, inlen,
                                       (block128_f) AES_decrypt);
    } else {
        if (EVP_CIPHER_CTX_encrypting(ctx))
            rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
                                 out, in, inlen, (block128_f) AES_encrypt);
        else
            rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
                                   out, in, inlen, (block128_f) AES_decrypt);
    }
    return rv ? (int)rv : -1;
}

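/*-
 * A usage sketch for the key-wrap ciphers defined below (illustrative
 * only). Wrap mode deviates from normal cipher semantics, so the caller
 * must opt in with EVP_CIPHER_CTX_FLAG_WRAP_ALLOW before initialisation;
 * with a NULL IV the wrap code falls back to the standard default IV.
 * Names are placeholders and error checking is omitted.
 *
 *      EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *      int outl;
 *
 *      EVP_CIPHER_CTX_set_flags(c, EVP_CIPHER_CTX_FLAG_WRAP_ALLOW);
 *      EVP_EncryptInit_ex(c, EVP_aes_128_wrap(), NULL, kek, NULL);
 *      EVP_EncryptUpdate(c, wrapped, &outl, key_data, key_data_len);
 *      EVP_CIPHER_CTX_free(c);
 */
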
# define WRAP_FLAGS      (EVP_CIPH_WRAP_MODE \
                | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)

static const EVP_CIPHER aes_128_wrap = {
    NID_id_aes128_wrap,
    8, 16, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_128_wrap(void)
{
    return &aes_128_wrap;
}

static const EVP_CIPHER aes_192_wrap = {
    NID_id_aes192_wrap,
    8, 24, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_192_wrap(void)
{
    return &aes_192_wrap;
}

static const EVP_CIPHER aes_256_wrap = {
    NID_id_aes256_wrap,
    8, 32, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_256_wrap(void)
{
    return &aes_256_wrap;
}

static const EVP_CIPHER aes_128_wrap_pad = {
    NID_id_aes128_wrap_pad,
    8, 16, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
{
    return &aes_128_wrap_pad;
}

static const EVP_CIPHER aes_192_wrap_pad = {
    NID_id_aes192_wrap_pad,
    8, 24, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
{
    return &aes_192_wrap_pad;
}

static const EVP_CIPHER aes_256_wrap_pad = {
    NID_id_aes256_wrap_pad,
    8, 32, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
{
    return &aes_256_wrap_pad;
}

# ifndef OPENSSL_NO_OCB
static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
    EVP_CIPHER_CTX *newc;
    EVP_AES_OCB_CTX *new_octx;

    switch (type) {
    case EVP_CTRL_INIT:
        octx->key_set = 0;
        octx->iv_set = 0;
        octx->ivlen = EVP_CIPHER_CTX_iv_length(c);
        octx->iv = EVP_CIPHER_CTX_iv_noconst(c);
        octx->taglen = 16;
        octx->data_buf_len = 0;
        octx->aad_buf_len = 0;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        /* IV len must be 1 to 15 */
        if (arg <= 0 || arg > 15)
            return 0;

        octx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if (!ptr) {
            /* Tag len must be 0 to 16 */
            if (arg < 0 || arg > 16)
                return 0;

            octx->taglen = arg;
            return 1;
        }
        if (arg != octx->taglen || EVP_CIPHER_CTX_encrypting(c))
            return 0;
        memcpy(octx->tag, ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (arg != octx->taglen || !EVP_CIPHER_CTX_encrypting(c))
            return 0;

        memcpy(ptr, octx->tag, arg);
        return 1;

    case EVP_CTRL_COPY:
        newc = (EVP_CIPHER_CTX *)ptr;
        new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc);
        return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
                                      &new_octx->ksenc.ks,
                                      &new_octx->ksdec.ks);

    default:
        return -1;
    }
}

#  ifdef HWAES_CAPABLE
#   ifdef HWAES_ocb_encrypt
void HWAES_ocb_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
#   else
#    define HWAES_ocb_encrypt NULL
#   endif
#   ifdef HWAES_ocb_decrypt
void HWAES_ocb_decrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
#   else
#    define HWAES_ocb_decrypt NULL
#   endif
#  endif

static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
#  ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksenc.ks);
                HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) HWAES_encrypt,
                                        (block128_f) HWAES_decrypt,
                                        enc ? HWAES_ocb_encrypt
                                            : HWAES_ocb_decrypt))
                    return 0;
                break;
            }
#  endif
#  ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksenc.ks);
                vpaes_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) vpaes_encrypt,
                                        (block128_f) vpaes_decrypt,
                                        NULL))
                    return 0;
                break;
            }
#  endif
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &octx->ksenc.ks);
            AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) AES_encrypt,
                                    (block128_f) AES_decrypt,
                                    NULL))
                return 0;
        } while (0);

        /*
         * If we have an IV, we can set it directly; otherwise use the
         * saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}

static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    unsigned char *buf;
    int *buf_len;
    int written_len = 0;
    size_t trailing_len;
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

    /* If the IV or key is not set, return an error */
    if (!octx->iv_set)
        return -1;

    if (!octx->key_set)
        return -1;

    if (in) {
        /*
         * Need to ensure we are only passing full blocks to low-level OCB
         * routines. We do it here rather than in EVP_EncryptUpdate/
         * EVP_DecryptUpdate because we need to pass full blocks of AAD too,
         * and those routines don't support that.
         */

        /* Are we dealing with AAD or normal data here? */
        if (out == NULL) {
            buf = octx->aad_buf;
            buf_len = &(octx->aad_buf_len);
        } else {
            buf = octx->data_buf;
            buf_len = &(octx->data_buf_len);
        }

        /*
         * If we've got a partially filled buffer from a previous call then
         * use that data first.
         */
        if (*buf_len) {
            unsigned int remaining;

            remaining = 16 - (*buf_len);
            if (remaining > len) {
                memcpy(buf + (*buf_len), in, len);
                *(buf_len) += len;
                return 0;
            }
            memcpy(buf + (*buf_len), in, remaining);

            /*
             * If we get here we've filled the buffer, so process it.
             */
            len -= remaining;
            in += remaining;
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, buf, 16))
                    return -1;
            } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out, 16))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out, 16))
                    return -1;
            }
            written_len = 16;
            *buf_len = 0;
            /*
             * Advance the output pointer past the block just written so
             * that the full blocks below do not overwrite it
             */
            if (out != NULL)
                out += 16;
        }

        /* Do we have a partial block to handle at the end? */
        trailing_len = len % 16;

        /*
         * If we've got some full blocks to handle, then process these first.
         */
        if (len != trailing_len) {
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
                    return -1;
            } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            }
            written_len += len - trailing_len;
            in += len - trailing_len;
        }

        /* Handle any trailing partial block */
        if (trailing_len) {
            memcpy(buf, in, trailing_len);
            *buf_len = trailing_len;
        }

        return written_len;
    } else {
        /*
         * First of all, empty the buffer of any partial block that we might
         * have been provided, both for data and AAD.
         */
        if (octx->data_buf_len) {
            if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            }
            written_len = octx->data_buf_len;
            octx->data_buf_len = 0;
        }
        if (octx->aad_buf_len) {
            if (!CRYPTO_ocb128_aad
                (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
                return -1;
            octx->aad_buf_len = 0;
        }
        /* If decrypting, then verify */
        if (!EVP_CIPHER_CTX_encrypting(ctx)) {
            if (octx->taglen < 0)
                return -1;
            if (CRYPTO_ocb128_finish(&octx->ocb,
                                     octx->tag, octx->taglen) != 0)
                return -1;
            octx->iv_set = 0;
            return written_len;
        }
        /* If encrypting, then just get the tag */
        if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
            return -1;
        /* Don't reuse the IV */
        octx->iv_set = 0;
        return written_len;
    }
}

static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
    CRYPTO_ocb128_cleanup(&octx->ocb);
    return 1;
}

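/*-
 * A usage sketch for the OCB ciphers defined below (illustrative only).
 * The flow mirrors GCM: an update with a NULL output buffer supplies AAD,
 * and the tag is fetched after finalisation. Names are placeholders and
 * error checking is omitted.
 *
 *      EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *      int outl, ct_len;
 *
 *      EVP_EncryptInit_ex(c, EVP_aes_128_ocb(), NULL, key, iv);
 *      EVP_EncryptUpdate(c, NULL, &outl, aad, aad_len);      (AAD pass)
 *      EVP_EncryptUpdate(c, ct, &outl, pt, pt_len);
 *      ct_len = outl;
 *      EVP_EncryptFinal_ex(c, ct + ct_len, &outl);
 *      ct_len += outl;
 *      EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
 *      EVP_CIPHER_CTX_free(c);
 */
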
BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
# endif                         /* OPENSSL_NO_OCB */
#endif