/*
 * Copyright 2017-2018 The OpenSSL Project Authors. All Rights Reserved.
 * Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved.
 *
 * Licensed under the Apache License 2.0 (the "License").  You may not use
 * this file except in compliance with the License.  You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */
11 #include "internal/cryptlib.h"
12 #ifndef OPENSSL_NO_ARIA
13 # include <openssl/evp.h>
14 # include <openssl/modes.h>
15 # include <openssl/rand.h>
16 # include <openssl/rand_drbg.h>
17 # include "internal/aria.h"
18 # include "internal/evp_int.h"
19 # include "modes_lcl.h"
20 # include "evp_locl.h"
22 /* ARIA subkey Structure */
27 /* ARIA GCM context */
32 } ks
; /* ARIA subkey to use */
33 int key_set
; /* Set if key initialised */
34 int iv_set
; /* Set if an iv is set */
36 unsigned char *iv
; /* Temporary IV store */
37 int ivlen
; /* IV length */
39 int iv_gen
; /* It is OK to generate IVs */
40 int tls_aad_len
; /* TLS AAD length */
43 /* ARIA CCM context */
48 } ks
; /* ARIA key schedule to use */
49 int key_set
; /* Set if key initialised */
50 int iv_set
; /* Set if an iv is set */
51 int tag_set
; /* Set if tag is valid */
52 int len_set
; /* Set if message length set */
53 int L
, M
; /* L and M parameters from RFC3610 */
54 int tls_aad_len
; /* TLS AAD length */
59 /* The subkey for ARIA is generated. */
60 static int aria_init_key(EVP_CIPHER_CTX
*ctx
, const unsigned char *key
,
61 const unsigned char *iv
, int enc
)
64 int mode
= EVP_CIPHER_CTX_mode(ctx
);
66 if (enc
|| (mode
!= EVP_CIPH_ECB_MODE
&& mode
!= EVP_CIPH_CBC_MODE
))
67 ret
= aria_set_encrypt_key(key
, EVP_CIPHER_CTX_key_length(ctx
) * 8,
68 EVP_CIPHER_CTX_get_cipher_data(ctx
));
70 ret
= aria_set_decrypt_key(key
, EVP_CIPHER_CTX_key_length(ctx
) * 8,
71 EVP_CIPHER_CTX_get_cipher_data(ctx
));
73 EVPerr(EVP_F_ARIA_INIT_KEY
,EVP_R_ARIA_KEY_SETUP_FAILED
);
79 static void aria_cbc_encrypt(const unsigned char *in
, unsigned char *out
,
80 size_t len
, const ARIA_KEY
*key
,
81 unsigned char *ivec
, const int enc
)
85 CRYPTO_cbc128_encrypt(in
, out
, len
, key
, ivec
,
86 (block128_f
) aria_encrypt
);
88 CRYPTO_cbc128_decrypt(in
, out
, len
, key
, ivec
,
89 (block128_f
) aria_encrypt
);
92 static void aria_cfb128_encrypt(const unsigned char *in
, unsigned char *out
,
93 size_t length
, const ARIA_KEY
*key
,
94 unsigned char *ivec
, int *num
, const int enc
)
97 CRYPTO_cfb128_encrypt(in
, out
, length
, key
, ivec
, num
, enc
,
98 (block128_f
) aria_encrypt
);
101 static void aria_cfb1_encrypt(const unsigned char *in
, unsigned char *out
,
102 size_t length
, const ARIA_KEY
*key
,
103 unsigned char *ivec
, int *num
, const int enc
)
105 CRYPTO_cfb128_1_encrypt(in
, out
, length
, key
, ivec
, num
, enc
,
106 (block128_f
) aria_encrypt
);
109 static void aria_cfb8_encrypt(const unsigned char *in
, unsigned char *out
,
110 size_t length
, const ARIA_KEY
*key
,
111 unsigned char *ivec
, int *num
, const int enc
)
113 CRYPTO_cfb128_8_encrypt(in
, out
, length
, key
, ivec
, num
, enc
,
114 (block128_f
) aria_encrypt
);
117 static void aria_ecb_encrypt(const unsigned char *in
, unsigned char *out
,
118 const ARIA_KEY
*key
, const int enc
)
120 aria_encrypt(in
, out
, key
);
123 static void aria_ofb128_encrypt(const unsigned char *in
, unsigned char *out
,
124 size_t length
, const ARIA_KEY
*key
,
125 unsigned char *ivec
, int *num
)
127 CRYPTO_ofb128_encrypt(in
, out
, length
, key
, ivec
, num
,
128 (block128_f
) aria_encrypt
);
131 IMPLEMENT_BLOCK_CIPHER(aria_128
, ks
, aria
, EVP_ARIA_KEY
,
132 NID_aria_128
, 16, 16, 16, 128,
133 0, aria_init_key
, NULL
,
134 EVP_CIPHER_set_asn1_iv
,
135 EVP_CIPHER_get_asn1_iv
,
137 IMPLEMENT_BLOCK_CIPHER(aria_192
, ks
, aria
, EVP_ARIA_KEY
,
138 NID_aria_192
, 16, 24, 16, 128,
139 0, aria_init_key
, NULL
,
140 EVP_CIPHER_set_asn1_iv
,
141 EVP_CIPHER_get_asn1_iv
,
143 IMPLEMENT_BLOCK_CIPHER(aria_256
, ks
, aria
, EVP_ARIA_KEY
,
144 NID_aria_256
, 16, 32, 16, 128,
145 0, aria_init_key
, NULL
,
146 EVP_CIPHER_set_asn1_iv
,
147 EVP_CIPHER_get_asn1_iv
,
150 # define IMPLEMENT_ARIA_CFBR(ksize,cbits) \
151 IMPLEMENT_CFBR(aria,aria,EVP_ARIA_KEY,ks,ksize,cbits,16,0)
152 IMPLEMENT_ARIA_CFBR(128,1)
153 IMPLEMENT_ARIA_CFBR(192,1)
154 IMPLEMENT_ARIA_CFBR(256,1)
155 IMPLEMENT_ARIA_CFBR(128,8)
156 IMPLEMENT_ARIA_CFBR(192,8)
157 IMPLEMENT_ARIA_CFBR(256,8)
/* Defines an EVP_CIPHER table plus its EVP_aria_<keylen>_<mode>() accessor
 * for stream-like modes (currently CTR). */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aria_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aria_init_key,                  \
        aria_##mode##_cipher,           \
        NULL,                           \
        sizeof(EVP_ARIA_KEY),           \
        NULL,NULL,NULL,NULL };          \
const EVP_CIPHER *EVP_aria_##keylen##_##mode(void) \
{ return &aria_##keylen##_##mode; }
171 static int aria_ctr_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
172 const unsigned char *in
, size_t len
)
174 unsigned int num
= EVP_CIPHER_CTX_num(ctx
);
175 EVP_ARIA_KEY
*dat
= EVP_C_DATA(EVP_ARIA_KEY
,ctx
);
177 CRYPTO_ctr128_encrypt(in
, out
, len
, &dat
->ks
,
178 EVP_CIPHER_CTX_iv_noconst(ctx
),
179 EVP_CIPHER_CTX_buf_noconst(ctx
), &num
,
180 (block128_f
) aria_encrypt
);
181 EVP_CIPHER_CTX_set_num(ctx
, num
);
185 BLOCK_CIPHER_generic(NID_aria
, 128, 1, 16, ctr
, ctr
, CTR
, 0)
186 BLOCK_CIPHER_generic(NID_aria
, 192, 1, 16, ctr
, ctr
, CTR
, 0)
187 BLOCK_CIPHER_generic(NID_aria
, 256, 1, 16, ctr
, ctr
, CTR
, 0)
/* Authenticated cipher modes (GCM/CCM) */

/* increment counter (64-bit int) by 1: big-endian increment of the last
 * 8 bytes of |counter|, propagating the carry and stopping early once a
 * byte does not wrap to zero. */
static void ctr64_inc(unsigned char *counter)
{
    int n = 8;
    unsigned char c;

    do {
        --n;
        c = counter[n];
        ++c;
        counter[n] = c;
        if (c)
            return;
    } while (n);
}
207 static int aria_gcm_init_key(EVP_CIPHER_CTX
*ctx
, const unsigned char *key
,
208 const unsigned char *iv
, int enc
)
211 EVP_ARIA_GCM_CTX
*gctx
= EVP_C_DATA(EVP_ARIA_GCM_CTX
,ctx
);
216 ret
= aria_set_encrypt_key(key
, EVP_CIPHER_CTX_key_length(ctx
) * 8,
218 CRYPTO_gcm128_init(&gctx
->gcm
, &gctx
->ks
,
219 (block128_f
) aria_encrypt
);
221 EVPerr(EVP_F_ARIA_GCM_INIT_KEY
,EVP_R_ARIA_KEY_SETUP_FAILED
);
226 * If we have an iv can set it directly, otherwise use saved IV.
228 if (iv
== NULL
&& gctx
->iv_set
)
231 CRYPTO_gcm128_setiv(&gctx
->gcm
, iv
, gctx
->ivlen
);
236 /* If key set use IV, otherwise copy */
238 CRYPTO_gcm128_setiv(&gctx
->gcm
, iv
, gctx
->ivlen
);
240 memcpy(gctx
->iv
, iv
, gctx
->ivlen
);
247 static int aria_gcm_ctrl(EVP_CIPHER_CTX
*c
, int type
, int arg
, void *ptr
)
249 EVP_ARIA_GCM_CTX
*gctx
= EVP_C_DATA(EVP_ARIA_GCM_CTX
,c
);
255 gctx
->ivlen
= EVP_CIPHER_CTX_iv_length(c
);
256 gctx
->iv
= EVP_CIPHER_CTX_iv_noconst(c
);
259 gctx
->tls_aad_len
= -1;
262 case EVP_CTRL_AEAD_SET_IVLEN
:
265 /* Allocate memory for IV if needed */
266 if ((arg
> EVP_MAX_IV_LENGTH
) && (arg
> gctx
->ivlen
)) {
267 if (gctx
->iv
!= EVP_CIPHER_CTX_iv_noconst(c
))
268 OPENSSL_free(gctx
->iv
);
269 if ((gctx
->iv
= OPENSSL_malloc(arg
)) == NULL
) {
270 EVPerr(EVP_F_ARIA_GCM_CTRL
, ERR_R_MALLOC_FAILURE
);
277 case EVP_CTRL_AEAD_SET_TAG
:
278 if (arg
<= 0 || arg
> 16 || EVP_CIPHER_CTX_encrypting(c
))
280 memcpy(EVP_CIPHER_CTX_buf_noconst(c
), ptr
, arg
);
284 case EVP_CTRL_AEAD_GET_TAG
:
285 if (arg
<= 0 || arg
> 16 || !EVP_CIPHER_CTX_encrypting(c
)
288 memcpy(ptr
, EVP_CIPHER_CTX_buf_noconst(c
), arg
);
291 case EVP_CTRL_GCM_SET_IV_FIXED
:
292 /* Special case: -1 length restores whole IV */
294 memcpy(gctx
->iv
, ptr
, gctx
->ivlen
);
299 * Fixed field must be at least 4 bytes and invocation field at least
302 if ((arg
< 4) || (gctx
->ivlen
- arg
) < 8)
305 memcpy(gctx
->iv
, ptr
, arg
);
306 if (EVP_CIPHER_CTX_encrypting(c
)
307 && RAND_bytes(gctx
->iv
+ arg
, gctx
->ivlen
- arg
) <= 0)
312 case EVP_CTRL_GCM_IV_GEN
:
313 if (gctx
->iv_gen
== 0 || gctx
->key_set
== 0)
315 CRYPTO_gcm128_setiv(&gctx
->gcm
, gctx
->iv
, gctx
->ivlen
);
316 if (arg
<= 0 || arg
> gctx
->ivlen
)
318 memcpy(ptr
, gctx
->iv
+ gctx
->ivlen
- arg
, arg
);
320 * Invocation field will be at least 8 bytes in size and so no need
321 * to check wrap around or increment more than last 8 bytes.
323 ctr64_inc(gctx
->iv
+ gctx
->ivlen
- 8);
327 case EVP_CTRL_GCM_SET_IV_INV
:
328 if (gctx
->iv_gen
== 0 || gctx
->key_set
== 0
329 || EVP_CIPHER_CTX_encrypting(c
))
331 memcpy(gctx
->iv
+ gctx
->ivlen
- arg
, ptr
, arg
);
332 CRYPTO_gcm128_setiv(&gctx
->gcm
, gctx
->iv
, gctx
->ivlen
);
336 case EVP_CTRL_AEAD_TLS1_AAD
:
337 /* Save the AAD for later use */
338 if (arg
!= EVP_AEAD_TLS1_AAD_LEN
)
340 memcpy(EVP_CIPHER_CTX_buf_noconst(c
), ptr
, arg
);
341 gctx
->tls_aad_len
= arg
;
344 EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 2] << 8
345 | EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 1];
346 /* Correct length for explicit IV */
347 if (len
< EVP_GCM_TLS_EXPLICIT_IV_LEN
)
349 len
-= EVP_GCM_TLS_EXPLICIT_IV_LEN
;
350 /* If decrypting correct for tag too */
351 if (!EVP_CIPHER_CTX_encrypting(c
)) {
352 if (len
< EVP_GCM_TLS_TAG_LEN
)
354 len
-= EVP_GCM_TLS_TAG_LEN
;
356 EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 2] = len
>> 8;
357 EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 1] = len
& 0xff;
359 /* Extra padding: tag appended to record */
360 return EVP_GCM_TLS_TAG_LEN
;
364 EVP_CIPHER_CTX
*out
= ptr
;
365 EVP_ARIA_GCM_CTX
*gctx_out
= EVP_C_DATA(EVP_ARIA_GCM_CTX
,out
);
367 if (gctx
->gcm
.key
!= &gctx
->ks
)
369 gctx_out
->gcm
.key
= &gctx_out
->ks
;
371 if (gctx
->iv
== EVP_CIPHER_CTX_iv_noconst(c
))
372 gctx_out
->iv
= EVP_CIPHER_CTX_iv_noconst(out
);
374 if ((gctx_out
->iv
= OPENSSL_malloc(gctx
->ivlen
)) == NULL
) {
375 EVPerr(EVP_F_ARIA_GCM_CTRL
, ERR_R_MALLOC_FAILURE
);
378 memcpy(gctx_out
->iv
, gctx
->iv
, gctx
->ivlen
);
389 static int aria_gcm_tls_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
390 const unsigned char *in
, size_t len
)
392 EVP_ARIA_GCM_CTX
*gctx
= EVP_C_DATA(EVP_ARIA_GCM_CTX
,ctx
);
395 /* Encrypt/decrypt must be performed in place */
397 || len
< (EVP_GCM_TLS_EXPLICIT_IV_LEN
+ EVP_GCM_TLS_TAG_LEN
))
400 * Set IV from start of buffer or generate IV and write to start of
403 if (EVP_CIPHER_CTX_ctrl(ctx
, EVP_CIPHER_CTX_encrypting(ctx
) ?
404 EVP_CTRL_GCM_IV_GEN
: EVP_CTRL_GCM_SET_IV_INV
,
405 EVP_GCM_TLS_EXPLICIT_IV_LEN
, out
) <= 0)
408 if (CRYPTO_gcm128_aad(&gctx
->gcm
, EVP_CIPHER_CTX_buf_noconst(ctx
),
411 /* Fix buffer and length to point to payload */
412 in
+= EVP_GCM_TLS_EXPLICIT_IV_LEN
;
413 out
+= EVP_GCM_TLS_EXPLICIT_IV_LEN
;
414 len
-= EVP_GCM_TLS_EXPLICIT_IV_LEN
+ EVP_GCM_TLS_TAG_LEN
;
415 if (EVP_CIPHER_CTX_encrypting(ctx
)) {
416 /* Encrypt payload */
417 if (CRYPTO_gcm128_encrypt(&gctx
->gcm
, in
, out
, len
))
420 /* Finally write tag */
421 CRYPTO_gcm128_tag(&gctx
->gcm
, out
, EVP_GCM_TLS_TAG_LEN
);
422 rv
= len
+ EVP_GCM_TLS_EXPLICIT_IV_LEN
+ EVP_GCM_TLS_TAG_LEN
;
425 if (CRYPTO_gcm128_decrypt(&gctx
->gcm
, in
, out
, len
))
428 CRYPTO_gcm128_tag(&gctx
->gcm
, EVP_CIPHER_CTX_buf_noconst(ctx
),
429 EVP_GCM_TLS_TAG_LEN
);
430 /* If tag mismatch wipe buffer */
431 if (CRYPTO_memcmp(EVP_CIPHER_CTX_buf_noconst(ctx
), in
+ len
,
432 EVP_GCM_TLS_TAG_LEN
)) {
433 OPENSSL_cleanse(out
, len
);
441 gctx
->tls_aad_len
= -1;
445 static int aria_gcm_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
446 const unsigned char *in
, size_t len
)
448 EVP_ARIA_GCM_CTX
*gctx
= EVP_C_DATA(EVP_ARIA_GCM_CTX
,ctx
);
450 /* If not set up, return error */
454 if (gctx
->tls_aad_len
>= 0)
455 return aria_gcm_tls_cipher(ctx
, out
, in
, len
);
461 if (CRYPTO_gcm128_aad(&gctx
->gcm
, in
, len
))
463 } else if (EVP_CIPHER_CTX_encrypting(ctx
)) {
464 if (CRYPTO_gcm128_encrypt(&gctx
->gcm
, in
, out
, len
))
467 if (CRYPTO_gcm128_decrypt(&gctx
->gcm
, in
, out
, len
))
472 if (!EVP_CIPHER_CTX_encrypting(ctx
)) {
473 if (gctx
->taglen
< 0)
475 if (CRYPTO_gcm128_finish(&gctx
->gcm
,
476 EVP_CIPHER_CTX_buf_noconst(ctx
),
482 CRYPTO_gcm128_tag(&gctx
->gcm
, EVP_CIPHER_CTX_buf_noconst(ctx
), 16);
484 /* Don't reuse the IV */
489 static int aria_ccm_init_key(EVP_CIPHER_CTX
*ctx
, const unsigned char *key
,
490 const unsigned char *iv
, int enc
)
493 EVP_ARIA_CCM_CTX
*cctx
= EVP_C_DATA(EVP_ARIA_CCM_CTX
,ctx
);
499 ret
= aria_set_encrypt_key(key
, EVP_CIPHER_CTX_key_length(ctx
) * 8,
501 CRYPTO_ccm128_init(&cctx
->ccm
, cctx
->M
, cctx
->L
,
502 &cctx
->ks
, (block128_f
) aria_encrypt
);
504 EVPerr(EVP_F_ARIA_CCM_INIT_KEY
,EVP_R_ARIA_KEY_SETUP_FAILED
);
511 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx
), iv
, 15 - cctx
->L
);
517 static int aria_ccm_ctrl(EVP_CIPHER_CTX
*c
, int type
, int arg
, void *ptr
)
519 EVP_ARIA_CCM_CTX
*cctx
= EVP_C_DATA(EVP_ARIA_CCM_CTX
,c
);
529 cctx
->tls_aad_len
= -1;
532 case EVP_CTRL_AEAD_TLS1_AAD
:
533 /* Save the AAD for later use */
534 if (arg
!= EVP_AEAD_TLS1_AAD_LEN
)
536 memcpy(EVP_CIPHER_CTX_buf_noconst(c
), ptr
, arg
);
537 cctx
->tls_aad_len
= arg
;
540 EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 2] << 8
541 | EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 1];
542 /* Correct length for explicit IV */
543 if (len
< EVP_CCM_TLS_EXPLICIT_IV_LEN
)
545 len
-= EVP_CCM_TLS_EXPLICIT_IV_LEN
;
546 /* If decrypting correct for tag too */
547 if (!EVP_CIPHER_CTX_encrypting(c
)) {
552 EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 2] = len
>> 8;
553 EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 1] = len
& 0xff;
555 /* Extra padding: tag appended to record */
558 case EVP_CTRL_CCM_SET_IV_FIXED
:
559 /* Sanity check length */
560 if (arg
!= EVP_CCM_TLS_FIXED_IV_LEN
)
562 /* Just copy to first part of IV */
563 memcpy(EVP_CIPHER_CTX_iv_noconst(c
), ptr
, arg
);
566 case EVP_CTRL_AEAD_SET_IVLEN
:
569 case EVP_CTRL_CCM_SET_L
:
570 if (arg
< 2 || arg
> 8)
574 case EVP_CTRL_AEAD_SET_TAG
:
575 if ((arg
& 1) || arg
< 4 || arg
> 16)
577 if (EVP_CIPHER_CTX_encrypting(c
) && ptr
)
581 memcpy(EVP_CIPHER_CTX_buf_noconst(c
), ptr
, arg
);
586 case EVP_CTRL_AEAD_GET_TAG
:
587 if (!EVP_CIPHER_CTX_encrypting(c
) || !cctx
->tag_set
)
589 if (!CRYPTO_ccm128_tag(&cctx
->ccm
, ptr
, (size_t)arg
))
598 EVP_CIPHER_CTX
*out
= ptr
;
599 EVP_ARIA_CCM_CTX
*cctx_out
= EVP_C_DATA(EVP_ARIA_CCM_CTX
,out
);
601 if (cctx
->ccm
.key
!= &cctx
->ks
)
603 cctx_out
->ccm
.key
= &cctx_out
->ks
;
613 static int aria_ccm_tls_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
614 const unsigned char *in
, size_t len
)
616 EVP_ARIA_CCM_CTX
*cctx
= EVP_C_DATA(EVP_ARIA_CCM_CTX
,ctx
);
617 CCM128_CONTEXT
*ccm
= &cctx
->ccm
;
619 /* Encrypt/decrypt must be performed in place */
620 if (out
!= in
|| len
< (EVP_CCM_TLS_EXPLICIT_IV_LEN
+ (size_t)cctx
->M
))
622 /* If encrypting set explicit IV from sequence number (start of AAD) */
623 if (EVP_CIPHER_CTX_encrypting(ctx
))
624 memcpy(out
, EVP_CIPHER_CTX_buf_noconst(ctx
),
625 EVP_CCM_TLS_EXPLICIT_IV_LEN
);
626 /* Get rest of IV from explicit IV */
627 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx
) + EVP_CCM_TLS_FIXED_IV_LEN
, in
,
628 EVP_CCM_TLS_EXPLICIT_IV_LEN
);
629 /* Correct length value */
630 len
-= EVP_CCM_TLS_EXPLICIT_IV_LEN
+ cctx
->M
;
631 if (CRYPTO_ccm128_setiv(ccm
, EVP_CIPHER_CTX_iv_noconst(ctx
), 15 - cctx
->L
,
635 CRYPTO_ccm128_aad(ccm
, EVP_CIPHER_CTX_buf_noconst(ctx
), cctx
->tls_aad_len
);
636 /* Fix buffer to point to payload */
637 in
+= EVP_CCM_TLS_EXPLICIT_IV_LEN
;
638 out
+= EVP_CCM_TLS_EXPLICIT_IV_LEN
;
639 if (EVP_CIPHER_CTX_encrypting(ctx
)) {
640 if (cctx
->str
? CRYPTO_ccm128_encrypt_ccm64(ccm
, in
, out
, len
, cctx
->str
)
641 : CRYPTO_ccm128_encrypt(ccm
, in
, out
, len
))
643 if (!CRYPTO_ccm128_tag(ccm
, out
+ len
, cctx
->M
))
645 return len
+ EVP_CCM_TLS_EXPLICIT_IV_LEN
+ cctx
->M
;
647 if (cctx
->str
? !CRYPTO_ccm128_decrypt_ccm64(ccm
, in
, out
, len
, cctx
->str
)
648 : !CRYPTO_ccm128_decrypt(ccm
, in
, out
, len
)) {
649 unsigned char tag
[16];
650 if (CRYPTO_ccm128_tag(ccm
, tag
, cctx
->M
)) {
651 if (!CRYPTO_memcmp(tag
, in
+ len
, cctx
->M
))
655 OPENSSL_cleanse(out
, len
);
660 static int aria_ccm_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
661 const unsigned char *in
, size_t len
)
663 EVP_ARIA_CCM_CTX
*cctx
= EVP_C_DATA(EVP_ARIA_CCM_CTX
,ctx
);
664 CCM128_CONTEXT
*ccm
= &cctx
->ccm
;
666 /* If not set up, return error */
670 if (cctx
->tls_aad_len
>= 0)
671 return aria_ccm_tls_cipher(ctx
, out
, in
, len
);
673 /* EVP_*Final() doesn't return any data */
674 if (in
== NULL
&& out
!= NULL
)
680 if (!EVP_CIPHER_CTX_encrypting(ctx
) && !cctx
->tag_set
)
684 if (CRYPTO_ccm128_setiv(ccm
, EVP_CIPHER_CTX_iv_noconst(ctx
),
690 /* If have AAD need message length */
691 if (!cctx
->len_set
&& len
)
693 CRYPTO_ccm128_aad(ccm
, in
, len
);
696 /* If not set length yet do it */
697 if (!cctx
->len_set
) {
698 if (CRYPTO_ccm128_setiv(ccm
, EVP_CIPHER_CTX_iv_noconst(ctx
),
703 if (EVP_CIPHER_CTX_encrypting(ctx
)) {
704 if (cctx
->str
? CRYPTO_ccm128_encrypt_ccm64(ccm
, in
, out
, len
, cctx
->str
)
705 : CRYPTO_ccm128_encrypt(ccm
, in
, out
, len
))
711 if (cctx
->str
? !CRYPTO_ccm128_decrypt_ccm64(ccm
, in
, out
, len
,
713 !CRYPTO_ccm128_decrypt(ccm
, in
, out
, len
)) {
714 unsigned char tag
[16];
715 if (CRYPTO_ccm128_tag(ccm
, tag
, cctx
->M
)) {
716 if (!CRYPTO_memcmp(tag
, EVP_CIPHER_CTX_buf_noconst(ctx
),
722 OPENSSL_cleanse(out
, len
);
/* Flag set shared by all ARIA AEAD (GCM/CCM) ciphers. */
#define ARIA_AUTH_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
                         | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                         | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                         | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_FLAG_AEAD_CIPHER)

/* Defines an AEAD EVP_CIPHER table plus its accessor function. */
#define BLOCK_CIPHER_aead(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aria_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,               \
        blocksize, keylen/8, ivlen,             \
        ARIA_AUTH_FLAGS|EVP_CIPH_##MODE##_MODE, \
        aria_##mode##_init_key,                 \
        aria_##mode##_cipher,                   \
        NULL,                                   \
        sizeof(EVP_ARIA_##MODE##_CTX),          \
        NULL,NULL,aria_##mode##_ctrl,NULL };    \
const EVP_CIPHER *EVP_aria_##keylen##_##mode(void) \
{ return (EVP_CIPHER*)&aria_##keylen##_##mode; }
748 BLOCK_CIPHER_aead(NID_aria
, 128, 1, 12, gcm
, gcm
, GCM
, 0)
749 BLOCK_CIPHER_aead(NID_aria
, 192, 1, 12, gcm
, gcm
, GCM
, 0)
750 BLOCK_CIPHER_aead(NID_aria
, 256, 1, 12, gcm
, gcm
, GCM
, 0)
752 BLOCK_CIPHER_aead(NID_aria
, 128, 1, 12, ccm
, ccm
, CCM
, 0)
753 BLOCK_CIPHER_aead(NID_aria
, 192, 1, 12, ccm
, ccm
, CCM
, 0)
754 BLOCK_CIPHER_aead(NID_aria
, 256, 1, 12, ccm
, ccm
, CCM
, 0)