/* ====================================================================
 * Copyright (c) 2011-2013 The OpenSSL Project. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. All advertising materials mentioning features or use of this
 *    software must display the following acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit. (http://www.OpenSSL.org/)"
 *
 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
 *    endorse or promote products derived from this software without
 *    prior written permission. For written permission, please contact
 *    licensing@OpenSSL.org.
 *
 * 5. Products derived from this software may not be called "OpenSSL"
 *    nor may "OpenSSL" appear in their names without prior written
 *    permission of the OpenSSL Project.
 *
 * 6. Redistributions of any form whatsoever must retain the following
 *    acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit (http://www.OpenSSL.org/)"
 *
 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 * ====================================================================
 */

#include <openssl/opensslconf.h>

#include <stdio.h>
#include <string.h>

#if !defined(OPENSSL_NO_AES) && !defined(OPENSSL_NO_SHA256)

#include <openssl/evp.h>
#include <openssl/objects.h>
#include <openssl/aes.h>
#include <openssl/sha.h>
#include <openssl/rand.h>
#include "modes_lcl.h"

#ifndef EVP_CIPH_FLAG_AEAD_CIPHER
#define EVP_CIPH_FLAG_AEAD_CIPHER       0x200000
#define EVP_CTRL_AEAD_TLS1_AAD          0x16
#define EVP_CTRL_AEAD_SET_MAC_KEY       0x17
#endif

#if !defined(EVP_CIPH_FLAG_DEFAULT_ASN1)
#define EVP_CIPH_FLAG_DEFAULT_ASN1      0
#endif

#if !defined(EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK)
#define EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK 0
#endif

#define TLS1_1_VERSION 0x0302

typedef struct {
    AES_KEY ks;
    SHA256_CTX head, tail, md;
    size_t payload_length;      /* AAD length in decrypt case */
    union {
        unsigned int tls_ver;
        unsigned char tls_aad[16]; /* 13 used */
    } aux;
} EVP_AES_HMAC_SHA256;
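
/*
 * head and tail hold SHA-256 states precomputed over the HMAC key XORed
 * with ipad and opad respectively (see EVP_CTRL_AEAD_SET_MAC_KEY below);
 * md is the per-record working copy, so every HMAC computation resumes
 * from a saved state instead of rehashing the 64-byte key block.
 */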

#define NO_PAYLOAD_LENGTH ((size_t)-1)

#if defined(AES_ASM) && ( \
        defined(__x86_64) || defined(__x86_64__) || \
        defined(_M_AMD64) || defined(_M_X64) || \
        defined(__INTEL__) )

extern unsigned int OPENSSL_ia32cap_P[3];
#define AESNI_CAPABLE (1<<(57-32))

int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);
int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);

void aesni_cbc_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key,
                       unsigned char *ivec, int enc);

int aesni_cbc_sha256_enc(const void *inp, void *out, size_t blocks,
                         const AES_KEY *key, unsigned char iv[16],
                         SHA256_CTX *ctx, const void *in0);

#define data(ctx) ((EVP_AES_HMAC_SHA256 *)(ctx)->cipher_data)

static int aesni_cbc_hmac_sha256_init_key(EVP_CIPHER_CTX *ctx,
                                          const unsigned char *inkey,
                                          const unsigned char *iv, int enc)
{
    EVP_AES_HMAC_SHA256 *key = data(ctx);
    int ret;

    if (enc)
        memset(&key->ks, 0, sizeof(key->ks.rd_key)),
        ret = aesni_set_encrypt_key(inkey, ctx->key_len * 8, &key->ks);
    else
        ret = aesni_set_decrypt_key(inkey, ctx->key_len * 8, &key->ks);

    SHA256_Init(&key->head);    /* handy when benchmarking */
    key->tail = key->head;
    key->md = key->head;

    key->payload_length = NO_PAYLOAD_LENGTH;

    return ret < 0 ? 0 : 1;
}

#define STITCHED_CALL

#if !defined(STITCHED_CALL)
#define aes_off 0
#endif

void sha256_block_data_order(void *c, const void *p, size_t len);

static void sha256_update(SHA256_CTX *c, const void *data, size_t len)
{
    const unsigned char *ptr = data;
    size_t res;

    if ((res = c->num)) {
        res = SHA256_CBLOCK - res;
        if (len < res)
            res = len;
        SHA256_Update(c, ptr, res);
        ptr += res;
        len -= res;
    }

    res = len % SHA256_CBLOCK;
    len -= res;

    if (len) {
        sha256_block_data_order(c, ptr, len / SHA256_CBLOCK);

        ptr += len;
        c->Nh += len >> 29;
        c->Nl += len <<= 3;
        if (c->Nl < (unsigned int)len)
            c->Nh++;
    }

    if (res)
        SHA256_Update(c, ptr, res);
}
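
/*
 * sha256_update() feeds whole 64-byte blocks straight to
 * sha256_block_data_order() and maintains the Nl/Nh bit counters itself,
 * bypassing the buffered SHA256_Update() for the bulk of the data; only
 * the unaligned head and tail go through the generic routine.
 */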

#ifdef SHA256_Update
#undef SHA256_Update
#endif
#define SHA256_Update sha256_update

#if !defined(OPENSSL_NO_MULTIBLOCK) && EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK

typedef struct {
    unsigned int A[8], B[8], C[8], D[8], E[8], F[8], G[8], H[8];
} SHA256_MB_CTX;
typedef struct {
    const unsigned char *ptr;
    int blocks;
} HASH_DESC;

void sha256_multi_block(SHA256_MB_CTX *, const HASH_DESC *, int);

typedef struct {
    const unsigned char *inp;
    unsigned char *out;
    int blocks;
    u64 iv[2];
} CIPH_DESC;

void aesni_multi_cbc_encrypt(CIPH_DESC *, void *, int);

static size_t tls1_1_multi_block_encrypt(EVP_AES_HMAC_SHA256 *key,
                                         unsigned char *out,
                                         const unsigned char *inp,
                                         size_t inp_len, int n4x)
{                               /* n4x is 1 or 2 */
    HASH_DESC hash_d[8], edges[8];
    CIPH_DESC ciph_d[8];
    unsigned char storage[sizeof(SHA256_MB_CTX) + 32];
    union {
        u64 q[16];
        u32 d[32];
        u8 c[128];
    } blocks[8];
    SHA256_MB_CTX *ctx;
    unsigned int frag, last, packlen, i, x4 = 4 * n4x, minblocks,
                 processed = 0;
    size_t ret = 0;
    u8 *IVs;
#if defined(BSWAP8)
    u64 seqnum;
#else
    unsigned int carry, j;
#endif

    if (RAND_bytes((IVs = blocks[0].c), 16 * x4) <= 0) /* ask for IVs in bulk */
        return 0;

    ctx = (SHA256_MB_CTX *)(storage + 32 - ((size_t)storage % 32)); /* align */

    frag = (unsigned int)inp_len >> (1 + n4x);
    last = (unsigned int)inp_len + frag - (frag << (1 + n4x));
    if (last > frag && ((last + 13 + 9) % 64) < (x4 - 1)) {
        frag++;
        last -= x4 - 1;
    }

    packlen = 5 + 16 + ((frag + 32 + 16) & -16);
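    /*
     * Each output record is 5 bytes of TLS header and 16 bytes of
     * explicit IV, followed by the fragment plus the 32-byte SHA-256
     * MAC and 1-16 bytes of CBC padding rounded up to a 16-byte block
     * boundary.
     */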

    /* populate descriptors with pointers and IVs */
    hash_d[0].ptr = inp;
    ciph_d[0].inp = inp;
    ciph_d[0].out = out + 5 + 16; /* 5+16 is place for header and explicit IV */
    memcpy(ciph_d[0].out - 16, IVs, 16);
    memcpy(ciph_d[0].iv, IVs, 16);
    IVs += 16;

    for (i = 1; i < x4; i++) {
        ciph_d[i].inp = hash_d[i].ptr = hash_d[i - 1].ptr + frag;
        ciph_d[i].out = ciph_d[i - 1].out + packlen;
        memcpy(ciph_d[i].out - 16, IVs, 16);
        memcpy(ciph_d[i].iv, IVs, 16);
        IVs += 16;
    }

#if defined(BSWAP8)
    memcpy(blocks[0].c, key->md.data, 8);
    seqnum = BSWAP8(blocks[0].q[0]);
#endif
    for (i = 0; i < x4; i++) {
        unsigned int len = (i == (x4 - 1) ? last : frag);

        ctx->A[i] = key->md.h[0];
        ctx->B[i] = key->md.h[1];
        ctx->C[i] = key->md.h[2];
        ctx->D[i] = key->md.h[3];
        ctx->E[i] = key->md.h[4];
        ctx->F[i] = key->md.h[5];
        ctx->G[i] = key->md.h[6];
        ctx->H[i] = key->md.h[7];

        /* fix seqnum */
#if defined(BSWAP8)
        blocks[i].q[0] = BSWAP8(seqnum + i);
#else
        for (carry = i, j = 8; j--;) {
            blocks[i].c[j] = ((u8 *)key->md.data)[j] + carry;
            carry = (blocks[i].c[j] - carry) >> (sizeof(carry) * 8 - 1);
        }
#endif
        blocks[i].c[8] = ((u8 *)key->md.data)[8];
        blocks[i].c[9] = ((u8 *)key->md.data)[9];
        blocks[i].c[10] = ((u8 *)key->md.data)[10];
        /* fix length */
        blocks[i].c[11] = (u8)(len >> 8);
        blocks[i].c[12] = (u8)(len);

        memcpy(blocks[i].c + 13, hash_d[i].ptr, 64 - 13);
        hash_d[i].ptr += 64 - 13;
        hash_d[i].blocks = (len - (64 - 13)) / 64;

        edges[i].ptr = blocks[i].c;
        edges[i].blocks = 1;
    }

    /* hash 13-byte headers and first 64-13 bytes of inputs */
    sha256_multi_block(ctx, edges, n4x);
    /* hash bulk inputs */
#define MAXCHUNKSIZE 2048
#if MAXCHUNKSIZE % 64
#error "MAXCHUNKSIZE is not divisible by 64"
#elif MAXCHUNKSIZE
    /*
     * goal is to minimize pressure on L1 cache by moving
     * in shorter steps, so that hashed data is still in
     * the cache by the time we encrypt it
     */
    minblocks = ((frag <= last ? frag : last) - (64 - 13)) / 64;
    if (minblocks > MAXCHUNKSIZE / 64) {
        for (i = 0; i < x4; i++) {
            edges[i].ptr = hash_d[i].ptr;
            edges[i].blocks = MAXCHUNKSIZE / 64;
            ciph_d[i].blocks = MAXCHUNKSIZE / 16;
        }
        do {
            sha256_multi_block(ctx, edges, n4x);
            aesni_multi_cbc_encrypt(ciph_d, &key->ks, n4x);

            for (i = 0; i < x4; i++) {
                edges[i].ptr = hash_d[i].ptr += MAXCHUNKSIZE;
                hash_d[i].blocks -= MAXCHUNKSIZE / 64;
                edges[i].blocks = MAXCHUNKSIZE / 64;
                ciph_d[i].inp += MAXCHUNKSIZE;
                ciph_d[i].out += MAXCHUNKSIZE;
                ciph_d[i].blocks = MAXCHUNKSIZE / 16;
                memcpy(ciph_d[i].iv, ciph_d[i].out - 16, 16);
            }
            processed += MAXCHUNKSIZE;
            minblocks -= MAXCHUNKSIZE / 64;
        } while (minblocks > MAXCHUNKSIZE / 64);
    }
#endif
#undef MAXCHUNKSIZE
    sha256_multi_block(ctx, hash_d, n4x);

    memset(blocks, 0, sizeof(blocks));
    for (i = 0; i < x4; i++) {
        unsigned int len = (i == (x4 - 1) ? last : frag),
                     off = hash_d[i].blocks * 64;
        const unsigned char *ptr = hash_d[i].ptr + off;

        off = (len - processed) - (64 - 13) - off; /* remainder actually */
        memcpy(blocks[i].c, ptr, off);
        blocks[i].c[off] = 0x80;
        len += 64 + 13;         /* 64 is HMAC header */
        len *= 8;               /* convert to bits */
        if (off < (64 - 8)) {
            PUTU32(blocks[i].c + 60, len);
            edges[i].blocks = 1;
        } else {
            PUTU32(blocks[i].c + 124, len);
            edges[i].blocks = 2;
        }
        edges[i].ptr = blocks[i].c;
    }

    /* hash input tails and finalize */
    sha256_multi_block(ctx, edges, n4x);

    memset(blocks, 0, sizeof(blocks));
    for (i = 0; i < x4; i++) {
        PUTU32(blocks[i].c + 0, ctx->A[i]);
        ctx->A[i] = key->tail.h[0];
        PUTU32(blocks[i].c + 4, ctx->B[i]);
        ctx->B[i] = key->tail.h[1];
        PUTU32(blocks[i].c + 8, ctx->C[i]);
        ctx->C[i] = key->tail.h[2];
        PUTU32(blocks[i].c + 12, ctx->D[i]);
        ctx->D[i] = key->tail.h[3];
        PUTU32(blocks[i].c + 16, ctx->E[i]);
        ctx->E[i] = key->tail.h[4];
        PUTU32(blocks[i].c + 20, ctx->F[i]);
        ctx->F[i] = key->tail.h[5];
        PUTU32(blocks[i].c + 24, ctx->G[i]);
        ctx->G[i] = key->tail.h[6];
        PUTU32(blocks[i].c + 28, ctx->H[i]);
        ctx->H[i] = key->tail.h[7];
        blocks[i].c[32] = 0x80;
        PUTU32(blocks[i].c + 60, (64 + 32) * 8);
        edges[i].ptr = blocks[i].c;
        edges[i].blocks = 1;
    }

    /* finalize MACs */
    sha256_multi_block(ctx, edges, n4x);

    for (i = 0; i < x4; i++) {
        unsigned int len = (i == (x4 - 1) ? last : frag), pad, j;
        unsigned char *out0 = out;

        memcpy(ciph_d[i].out, ciph_d[i].inp, len - processed);
        ciph_d[i].inp = ciph_d[i].out;

        out += 5 + 16 + len;

        /* write MAC */
        PUTU32(out + 0, ctx->A[i]);
        PUTU32(out + 4, ctx->B[i]);
        PUTU32(out + 8, ctx->C[i]);
        PUTU32(out + 12, ctx->D[i]);
        PUTU32(out + 16, ctx->E[i]);
        PUTU32(out + 20, ctx->F[i]);
        PUTU32(out + 24, ctx->G[i]);
        PUTU32(out + 28, ctx->H[i]);
        out += 32;
        len += 32;

        /* pad */
        pad = 15 - len % 16;
        for (j = 0; j <= pad; j++)
            *(out++) = pad;
        len += pad + 1;

        ciph_d[i].blocks = (len - processed) / 16;
        len += 16;              /* account for explicit iv */

        /* arrange header */
        out0[0] = ((u8 *)key->md.data)[8];
        out0[1] = ((u8 *)key->md.data)[9];
        out0[2] = ((u8 *)key->md.data)[10];
        out0[3] = (u8)(len >> 8);
        out0[4] = (u8)(len);

        ret += len + 5;
        inp += frag;
    }

    aesni_multi_cbc_encrypt(ciph_d, &key->ks, n4x);

    OPENSSL_cleanse(blocks, sizeof(blocks));
    OPENSSL_cleanse(ctx, sizeof(*ctx));

    return ret;
}
#endif

static int aesni_cbc_hmac_sha256_cipher(EVP_CIPHER_CTX *ctx,
                                        unsigned char *out,
                                        const unsigned char *in, size_t len)
{
    EVP_AES_HMAC_SHA256 *key = data(ctx);
    unsigned int l;
    size_t plen = key->payload_length,
           iv = 0,              /* explicit IV in TLS 1.1 and later */
           sha_off = 0;
#if defined(STITCHED_CALL)
    size_t aes_off = 0, blocks;

    sha_off = SHA256_CBLOCK - key->md.num;
#endif

    key->payload_length = NO_PAYLOAD_LENGTH;

    if (len % AES_BLOCK_SIZE)
        return 0;

    if (ctx->encrypt) {
        if (plen == NO_PAYLOAD_LENGTH)
            plen = len;
        else if (len !=
                 ((plen + SHA256_DIGEST_LENGTH +
                   AES_BLOCK_SIZE) & -AES_BLOCK_SIZE))
            return 0;
        else if (key->aux.tls_ver >= TLS1_1_VERSION)
            iv = AES_BLOCK_SIZE;

#if defined(STITCHED_CALL)
        if (OPENSSL_ia32cap_P[1] & (1 << (60 - 32)) && /* AVX? */
            plen > (sha_off + iv) &&
            (blocks = (plen - (sha_off + iv)) / SHA256_CBLOCK)) {
            SHA256_Update(&key->md, in + iv, sha_off);

            (void)aesni_cbc_sha256_enc(in, out, blocks, &key->ks,
                                       ctx->iv, &key->md, in + iv + sha_off);
            blocks *= SHA256_CBLOCK;
            aes_off += blocks;
            sha_off += blocks;
            key->md.Nh += blocks >> 29;
            key->md.Nl += blocks <<= 3;
            if (key->md.Nl < (unsigned int)blocks)
                key->md.Nh++;
        } else {
            sha_off = 0;
        }
#endif
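        /*
         * The "stitched" path above interleaves AES-CBC encryption and
         * SHA-256 compression in one assembly loop, so the bulk of the
         * payload is encrypted and hashed in a single pass; whatever it
         * does not cover is finished below with separate calls.
         */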
        sha_off += iv;
        SHA256_Update(&key->md, in + sha_off, plen - sha_off);

        if (plen != len) {      /* "TLS" mode of operation */
            if (in != out)
                memcpy(out + aes_off, in + aes_off, plen - aes_off);

            /* calculate HMAC and append it to payload */
            SHA256_Final(out + plen, &key->md);
            key->md = key->tail;
            SHA256_Update(&key->md, out + plen, SHA256_DIGEST_LENGTH);
            SHA256_Final(out + plen, &key->md);

            /* pad the payload|hmac */
            plen += SHA256_DIGEST_LENGTH;
            for (l = len - plen - 1; plen < len; plen++)
                out[plen] = l;
            /* encrypt HMAC|padding at once */
            aesni_cbc_encrypt(out + aes_off, out + aes_off, len - aes_off,
                              &key->ks, ctx->iv, 1);
        } else {
            aesni_cbc_encrypt(in + aes_off, out + aes_off, len - aes_off,
                              &key->ks, ctx->iv, 1);
        }
    } else {
        union {
            unsigned int u[SHA256_DIGEST_LENGTH / sizeof(unsigned int)];
            unsigned char c[64 + SHA256_DIGEST_LENGTH];
        } mac, *pmac;

        /* arrange cache line alignment */
        pmac = (void *)(((size_t)mac.c + 63) & ((size_t)0 - 64));
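        /*
         * The decrypt path below computes the HMAC in constant time,
         * touching every ciphertext byte regardless of where the padding
         * actually starts; this is the mitigation for the Lucky Thirteen
         * padding-oracle timing attack.  Cache-line alignment of pmac
         * keeps its access pattern independent of secret values as well.
         */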

        /* decrypt HMAC|padding at once */
        aesni_cbc_encrypt(in, out, len, &key->ks, ctx->iv, 0);

        if (plen != NO_PAYLOAD_LENGTH) { /* "TLS" mode of operation */
            size_t inp_len, mask, j, i;
            unsigned int res, maxpad, pad, bitlen;
            int ret = 1;
            union {
                unsigned int u[SHA_LBLOCK];
                unsigned char c[SHA256_CBLOCK];
            } *data = (void *)key->md.data;

            if ((key->aux.tls_aad[plen - 4] << 8 | key->aux.tls_aad[plen - 3])
                >= TLS1_1_VERSION)
                iv = AES_BLOCK_SIZE;

            if (len < (iv + SHA256_DIGEST_LENGTH + 1))
                return 0;

            /* omit explicit iv */
            out += iv;
            len -= iv;

            /* figure out payload length */
            pad = out[len - 1];
            maxpad = len - (SHA256_DIGEST_LENGTH + 1);
            maxpad |= (255 - maxpad) >> (sizeof(maxpad) * 8 - 8);
            maxpad &= 255;

            inp_len = len - (SHA256_DIGEST_LENGTH + pad + 1);
            mask = (0 - ((inp_len - len) >> (sizeof(inp_len) * 8 - 1)));
            inp_len &= mask;
            ret &= (int)mask;

            key->aux.tls_aad[plen - 2] = inp_len >> 8;
            key->aux.tls_aad[plen - 1] = inp_len;

            /* calculate HMAC */
            key->md = key->head;
            SHA256_Update(&key->md, key->aux.tls_aad, plen);

#if 1
            len -= SHA256_DIGEST_LENGTH; /* amend mac */
            if (len >= (256 + SHA256_CBLOCK)) {
                j = (len - (256 + SHA256_CBLOCK)) & (0 - SHA256_CBLOCK);
                j += SHA256_CBLOCK - key->md.num;
                SHA256_Update(&key->md, out, j);
                out += j;
                len -= j;
                inp_len -= j;
            }

            /* but pretend as if we hashed padded payload */
            bitlen = key->md.Nl + (inp_len << 3); /* at most 18 bits */
#ifdef BSWAP4
            bitlen = BSWAP4(bitlen);
#else
            mac.c[0] = 0;
            mac.c[1] = (unsigned char)(bitlen >> 16);
            mac.c[2] = (unsigned char)(bitlen >> 8);
            mac.c[3] = (unsigned char)bitlen;
            bitlen = mac.u[0];
#endif

            pmac->u[0] = 0;
            pmac->u[1] = 0;
            pmac->u[2] = 0;
            pmac->u[3] = 0;
            pmac->u[4] = 0;
            pmac->u[5] = 0;
            pmac->u[6] = 0;
            pmac->u[7] = 0;

            for (res = key->md.num, j = 0; j < len; j++) {
                size_t c = out[j];
                mask = (j - inp_len) >> (sizeof(j) * 8 - 8);
                c &= mask;
                c |= 0x80 & ~mask & ~((inp_len - j) >> (sizeof(j) * 8 - 8));
                data->c[res++] = (unsigned char)c;

                if (res != SHA256_CBLOCK)
                    continue;

                /* j is not incremented yet */
                mask = 0 - ((inp_len + 7 - j) >> (sizeof(j) * 8 - 1));
                data->u[SHA_LBLOCK - 1] |= bitlen & mask;
                sha256_block_data_order(&key->md, data, 1);
                mask &= 0 - ((j - inp_len - 72) >> (sizeof(j) * 8 - 1));
                pmac->u[0] |= key->md.h[0] & mask;
                pmac->u[1] |= key->md.h[1] & mask;
                pmac->u[2] |= key->md.h[2] & mask;
                pmac->u[3] |= key->md.h[3] & mask;
                pmac->u[4] |= key->md.h[4] & mask;
                pmac->u[5] |= key->md.h[5] & mask;
                pmac->u[6] |= key->md.h[6] & mask;
                pmac->u[7] |= key->md.h[7] & mask;
                res = 0;
            }

            for (i = res; i < SHA256_CBLOCK; i++, j++)
                data->c[i] = 0;

            if (res > SHA256_CBLOCK - 8) {
                mask = 0 - ((inp_len + 8 - j) >> (sizeof(j) * 8 - 1));
                data->u[SHA_LBLOCK - 1] |= bitlen & mask;
                sha256_block_data_order(&key->md, data, 1);
                mask &= 0 - ((j - inp_len - 73) >> (sizeof(j) * 8 - 1));
                pmac->u[0] |= key->md.h[0] & mask;
                pmac->u[1] |= key->md.h[1] & mask;
                pmac->u[2] |= key->md.h[2] & mask;
                pmac->u[3] |= key->md.h[3] & mask;
                pmac->u[4] |= key->md.h[4] & mask;
                pmac->u[5] |= key->md.h[5] & mask;
                pmac->u[6] |= key->md.h[6] & mask;
                pmac->u[7] |= key->md.h[7] & mask;

                memset(data, 0, SHA256_CBLOCK);
                j += 64;
            }
            data->u[SHA_LBLOCK - 1] = bitlen;
            sha256_block_data_order(&key->md, data, 1);
            mask = 0 - ((j - inp_len - 73) >> (sizeof(j) * 8 - 1));
            pmac->u[0] |= key->md.h[0] & mask;
            pmac->u[1] |= key->md.h[1] & mask;
            pmac->u[2] |= key->md.h[2] & mask;
            pmac->u[3] |= key->md.h[3] & mask;
            pmac->u[4] |= key->md.h[4] & mask;
            pmac->u[5] |= key->md.h[5] & mask;
            pmac->u[6] |= key->md.h[6] & mask;
            pmac->u[7] |= key->md.h[7] & mask;
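            /*
             * In the accumulation above, mask is all-ones only for the
             * compression call that corresponds to the true end of the
             * padded payload, so exactly one intermediate digest is
             * OR-ed into pmac while every candidate block is still
             * processed.
             */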

#ifdef BSWAP4
            pmac->u[0] = BSWAP4(pmac->u[0]);
            pmac->u[1] = BSWAP4(pmac->u[1]);
            pmac->u[2] = BSWAP4(pmac->u[2]);
            pmac->u[3] = BSWAP4(pmac->u[3]);
            pmac->u[4] = BSWAP4(pmac->u[4]);
            pmac->u[5] = BSWAP4(pmac->u[5]);
            pmac->u[6] = BSWAP4(pmac->u[6]);
            pmac->u[7] = BSWAP4(pmac->u[7]);
#else
            for (i = 0; i < 8; i++) {
                res = pmac->u[i];
                pmac->c[4 * i + 0] = (unsigned char)(res >> 24);
                pmac->c[4 * i + 1] = (unsigned char)(res >> 16);
                pmac->c[4 * i + 2] = (unsigned char)(res >> 8);
                pmac->c[4 * i + 3] = (unsigned char)res;
            }
#endif
            len += SHA256_DIGEST_LENGTH;
#else
            SHA256_Update(&key->md, out, inp_len);
            res = key->md.num;
            SHA256_Final(pmac->c, &key->md);

            {
                unsigned int inp_blocks, pad_blocks;

                /* but pretend as if we hashed padded payload */
                inp_blocks =
                    1 + ((SHA256_CBLOCK - 9 - res) >> (sizeof(res) * 8 - 1));
                res += (unsigned int)(len - inp_len);
                pad_blocks = res / SHA256_CBLOCK;
                res %= SHA256_CBLOCK;
                pad_blocks +=
                    1 + ((SHA256_CBLOCK - 9 - res) >> (sizeof(res) * 8 - 1));
                for (; inp_blocks < pad_blocks; inp_blocks++)
                    sha256_block_data_order(&key->md, data, 1);
            }
#endif
            key->md = key->tail;
            SHA256_Update(&key->md, pmac->c, SHA256_DIGEST_LENGTH);
            SHA256_Final(pmac->c, &key->md);

            /* verify HMAC */
            out += inp_len;
            len -= inp_len;
#if 1
            {
                unsigned char *p =
                    out + len - 1 - maxpad - SHA256_DIGEST_LENGTH;
                size_t off = out - p;
                unsigned int c, cmask;

                maxpad += SHA256_DIGEST_LENGTH;
                for (res = 0, i = 0, j = 0; j < maxpad; j++) {
                    c = p[j];
                    cmask =
                        ((int)(j - off - SHA256_DIGEST_LENGTH)) >>
                        (sizeof(int) * 8 - 1);
                    res |= (c ^ pad) & ~cmask; /* ... and padding */
                    cmask &= ((int)(off - 1 - j)) >> (sizeof(int) * 8 - 1);
                    res |= (c ^ pmac->c[i]) & cmask;
                    i += 1 & cmask;
                }
                maxpad -= SHA256_DIGEST_LENGTH;

                res = 0 - ((0 - res) >> (sizeof(res) * 8 - 1));
                ret &= (int)~res;
            }
#else
            for (res = 0, i = 0; i < SHA256_DIGEST_LENGTH; i++)
                res |= out[i] ^ pmac->c[i];
            res = 0 - ((0 - res) >> (sizeof(res) * 8 - 1));
            ret &= (int)~res;

            /* verify padding */
            pad = (pad & ~res) | (maxpad & res);
            out = out + len - 1 - pad;
            for (res = 0, i = 0; i < pad; i++)
                res |= out[i] ^ pad;

            res = (0 - res) >> (sizeof(res) * 8 - 1);
            ret &= (int)~res;
#endif
            return ret;
        } else {
            SHA256_Update(&key->md, out, len);
        }
    }

    return 1;
}

static int aesni_cbc_hmac_sha256_ctrl(EVP_CIPHER_CTX *ctx, int type, int arg,
                                      void *ptr)
{
    EVP_AES_HMAC_SHA256 *key = data(ctx);

    switch (type) {
    case EVP_CTRL_AEAD_SET_MAC_KEY:
        {
            unsigned int i;
            unsigned char hmac_key[64];

            memset(hmac_key, 0, sizeof(hmac_key));

            if (arg > (int)sizeof(hmac_key)) {
                SHA256_Init(&key->head);
                SHA256_Update(&key->head, ptr, arg);
                SHA256_Final(hmac_key, &key->head);
            } else {
                memcpy(hmac_key, ptr, arg);
            }

            for (i = 0; i < sizeof(hmac_key); i++)
                hmac_key[i] ^= 0x36; /* ipad */
            SHA256_Init(&key->head);
            SHA256_Update(&key->head, hmac_key, sizeof(hmac_key));

            for (i = 0; i < sizeof(hmac_key); i++)
                hmac_key[i] ^= 0x36 ^ 0x5c; /* opad */
            SHA256_Init(&key->tail);
            SHA256_Update(&key->tail, hmac_key, sizeof(hmac_key));

            OPENSSL_cleanse(hmac_key, sizeof(hmac_key));

            return 1;
        }
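        /*
         * The handler above precomputes the two fixed halves of
         * HMAC(k, m) = H((k ^ opad) || H((k ^ ipad) || m)): key->head is
         * the hash state after absorbing the ipad block and key->tail
         * after the opad block, so per-record MACs resume from these
         * states rather than rehashing the key.
         */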
    case EVP_CTRL_AEAD_TLS1_AAD:
        {
            unsigned char *p = ptr;
            unsigned int len = p[arg - 2] << 8 | p[arg - 1];

            if (ctx->encrypt) {
                key->payload_length = len;
                if ((key->aux.tls_ver =
                     p[arg - 4] << 8 | p[arg - 3]) >= TLS1_1_VERSION) {
                    len -= AES_BLOCK_SIZE;
                    p[arg - 2] = len >> 8;
                    p[arg - 1] = len;
                }
                key->md = key->head;
                SHA256_Update(&key->md, p, arg);

                return (int)(((len + SHA256_DIGEST_LENGTH +
                               AES_BLOCK_SIZE) & -AES_BLOCK_SIZE) - len);
            } else {
                if (arg > 13)
                    arg = 13;
                memcpy(key->aux.tls_aad, ptr, arg);
                key->payload_length = arg;

                return SHA256_DIGEST_LENGTH;
            }
        }
#if !defined(OPENSSL_NO_MULTIBLOCK) && EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK
    case EVP_CTRL_TLS1_1_MULTIBLOCK_MAX_BUFSIZE:
        return (int)(5 + 16 + ((arg + 32 + 16) & -16));
    case EVP_CTRL_TLS1_1_MULTIBLOCK_AAD:
        {
            EVP_CTRL_TLS1_1_MULTIBLOCK_PARAM *param =
                (EVP_CTRL_TLS1_1_MULTIBLOCK_PARAM *)ptr;
            unsigned int n4x = 1, x4;
            unsigned int frag, last, packlen, inp_len;

            if (arg < (int)sizeof(EVP_CTRL_TLS1_1_MULTIBLOCK_PARAM))
                return -1;

            inp_len = param->inp[11] << 8 | param->inp[12];

            if (ctx->encrypt) {
                if ((param->inp[9] << 8 | param->inp[10]) < TLS1_1_VERSION)
                    return -1;

                if (inp_len) {
                    if (inp_len < 4096)
                        return 0; /* too short */

                    if (inp_len >= 8192 && OPENSSL_ia32cap_P[2] & (1 << 5))
                        n4x = 2; /* AVX2 */
                } else if ((n4x = param->interleave / 4) && n4x <= 2)
                    inp_len = param->len;
                else
                    return -1;

                key->md = key->head;
                SHA256_Update(&key->md, param->inp, 13);

                x4 = 4 * n4x;
                n4x += 1;

                frag = inp_len >> n4x;
                last = inp_len + frag - (frag << n4x);
                if (last > frag && ((last + 13 + 9) % 64 < (x4 - 1))) {
                    frag++;
                    last -= x4 - 1;
                }

                packlen = 5 + 16 + ((frag + 32 + 16) & -16);
                packlen = (packlen << n4x) - packlen;
                packlen += 5 + 16 + ((last + 32 + 16) & -16);

                param->interleave = x4;

                return (int)packlen;
            } else
                return -1;      /* not yet */
        }
    case EVP_CTRL_TLS1_1_MULTIBLOCK_ENCRYPT:
        {
            EVP_CTRL_TLS1_1_MULTIBLOCK_PARAM *param =
                (EVP_CTRL_TLS1_1_MULTIBLOCK_PARAM *)ptr;

            return (int)tls1_1_multi_block_encrypt(key, param->out,
                                                   param->inp, param->len,
                                                   param->interleave / 4);
        }
    case EVP_CTRL_TLS1_1_MULTIBLOCK_DECRYPT:
#endif
    default:
        return -1;
    }
}

static EVP_CIPHER aesni_128_cbc_hmac_sha256_cipher = {
#ifdef NID_aes_128_cbc_hmac_sha256
    NID_aes_128_cbc_hmac_sha256,
#else
    NID_undef,
#endif
    16, 16, 16,
    EVP_CIPH_CBC_MODE | EVP_CIPH_FLAG_DEFAULT_ASN1 |
        EVP_CIPH_FLAG_AEAD_CIPHER | EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK,
    aesni_cbc_hmac_sha256_init_key,
    aesni_cbc_hmac_sha256_cipher,
    NULL,
    sizeof(EVP_AES_HMAC_SHA256),
    EVP_CIPH_FLAG_DEFAULT_ASN1 ? NULL : EVP_CIPHER_set_asn1_iv,
    EVP_CIPH_FLAG_DEFAULT_ASN1 ? NULL : EVP_CIPHER_get_asn1_iv,
    aesni_cbc_hmac_sha256_ctrl,
    NULL
};

static EVP_CIPHER aesni_256_cbc_hmac_sha256_cipher = {
#ifdef NID_aes_256_cbc_hmac_sha256
    NID_aes_256_cbc_hmac_sha256,
#else
    NID_undef,
#endif
    16, 32, 16,
    EVP_CIPH_CBC_MODE | EVP_CIPH_FLAG_DEFAULT_ASN1 |
        EVP_CIPH_FLAG_AEAD_CIPHER | EVP_CIPH_FLAG_TLS1_1_MULTIBLOCK,
    aesni_cbc_hmac_sha256_init_key,
    aesni_cbc_hmac_sha256_cipher,
    NULL,
    sizeof(EVP_AES_HMAC_SHA256),
    EVP_CIPH_FLAG_DEFAULT_ASN1 ? NULL : EVP_CIPHER_set_asn1_iv,
    EVP_CIPH_FLAG_DEFAULT_ASN1 ? NULL : EVP_CIPHER_get_asn1_iv,
    aesni_cbc_hmac_sha256_ctrl,
    NULL
};

const EVP_CIPHER *EVP_aes_128_cbc_hmac_sha256(void)
{
    return ((OPENSSL_ia32cap_P[1] & AESNI_CAPABLE) &&
            aesni_cbc_sha256_enc(NULL, NULL, 0, NULL, NULL, NULL, NULL) ?
            &aesni_128_cbc_hmac_sha256_cipher : NULL);
}

const EVP_CIPHER *EVP_aes_256_cbc_hmac_sha256(void)
{
    return ((OPENSSL_ia32cap_P[1] & AESNI_CAPABLE) &&
            aesni_cbc_sha256_enc(NULL, NULL, 0, NULL, NULL, NULL, NULL) ?
            &aesni_256_cbc_hmac_sha256_cipher : NULL);
}
#else
const EVP_CIPHER *EVP_aes_128_cbc_hmac_sha256(void)
{
    return NULL;
}

const EVP_CIPHER *EVP_aes_256_cbc_hmac_sha256(void)
{
    return NULL;
}
#endif
#endif
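
/*
 * Usage sketch, illustrative only: the composite cipher is driven through
 * the ordinary EVP interface.  A TLS stack installs the MAC key once and
 * then passes the 13-byte record header (sequence number, type, version,
 * length) as AAD before each EVP_Cipher() call.  aes_key, record_iv,
 * mac_key, mac_key_len, payload_len, in and out below are placeholders,
 * not names from this file; the real callers live in ssl/t1_enc.c.
 *
 *     EVP_CIPHER_CTX cctx;
 *     unsigned char aad[13];
 *     int pad;
 *
 *     EVP_CIPHER_CTX_init(&cctx);
 *     EVP_EncryptInit_ex(&cctx, EVP_aes_128_cbc_hmac_sha256(), NULL,
 *                        aes_key, record_iv);
 *     EVP_CIPHER_CTX_ctrl(&cctx, EVP_CTRL_AEAD_SET_MAC_KEY,
 *                         mac_key_len, mac_key);
 *     pad = EVP_CIPHER_CTX_ctrl(&cctx, EVP_CTRL_AEAD_TLS1_AAD,
 *                               sizeof(aad), aad);
 *     EVP_Cipher(&cctx, out, in, payload_len + pad);
 *
 * The AAD control returns the MAC-plus-padding overhead that the caller
 * must add to the payload length before invoking EVP_Cipher().
 */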