/*
 * Copyright 2017-2021 The OpenSSL Project Authors. All Rights Reserved.
 * Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved.
 *
 * Licensed under the Apache License 2.0 (the "License").  You may not use
 * this file except in compliance with the License.  You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */
#include <string.h>

#include "internal/cryptlib.h"
#ifndef OPENSSL_NO_ARIA
# include <openssl/evp.h>
# include <openssl/modes.h>
# include <openssl/rand.h>
# include "crypto/aria.h"
# include "crypto/evp.h"
# include "crypto/modes.h"
# include "evp_local.h"
21 /* ARIA subkey Structure */
26 /* ARIA GCM context */
31 } ks
; /* ARIA subkey to use */
32 int key_set
; /* Set if key initialised */
33 int iv_set
; /* Set if an iv is set */
35 unsigned char *iv
; /* Temporary IV store */
36 int ivlen
; /* IV length */
38 int iv_gen
; /* It is OK to generate IVs */
39 int tls_aad_len
; /* TLS AAD length */
42 /* ARIA CCM context */
47 } ks
; /* ARIA key schedule to use */
48 int key_set
; /* Set if key initialised */
49 int iv_set
; /* Set if an iv is set */
50 int tag_set
; /* Set if tag is valid */
51 int len_set
; /* Set if message length set */
52 int L
, M
; /* L and M parameters from RFC3610 */
53 int tls_aad_len
; /* TLS AAD length */
58 /* The subkey for ARIA is generated. */
59 static int aria_init_key(EVP_CIPHER_CTX
*ctx
, const unsigned char *key
,
60 const unsigned char *iv
, int enc
)
63 int mode
= EVP_CIPHER_CTX_mode(ctx
);
65 if (enc
|| (mode
!= EVP_CIPH_ECB_MODE
&& mode
!= EVP_CIPH_CBC_MODE
))
66 ret
= ossl_aria_set_encrypt_key(key
, EVP_CIPHER_CTX_key_length(ctx
) * 8,
67 EVP_CIPHER_CTX_get_cipher_data(ctx
));
69 ret
= ossl_aria_set_decrypt_key(key
, EVP_CIPHER_CTX_key_length(ctx
) * 8,
70 EVP_CIPHER_CTX_get_cipher_data(ctx
));
72 ERR_raise(ERR_LIB_EVP
,EVP_R_ARIA_KEY_SETUP_FAILED
);
78 static void aria_cbc_encrypt(const unsigned char *in
, unsigned char *out
,
79 size_t len
, const ARIA_KEY
*key
,
80 unsigned char *ivec
, const int enc
)
84 CRYPTO_cbc128_encrypt(in
, out
, len
, key
, ivec
,
85 (block128_f
) ossl_aria_encrypt
);
87 CRYPTO_cbc128_decrypt(in
, out
, len
, key
, ivec
,
88 (block128_f
) ossl_aria_encrypt
);
91 static void aria_cfb128_encrypt(const unsigned char *in
, unsigned char *out
,
92 size_t length
, const ARIA_KEY
*key
,
93 unsigned char *ivec
, int *num
, const int enc
)
96 CRYPTO_cfb128_encrypt(in
, out
, length
, key
, ivec
, num
, enc
,
97 (block128_f
) ossl_aria_encrypt
);
100 static void aria_cfb1_encrypt(const unsigned char *in
, unsigned char *out
,
101 size_t length
, const ARIA_KEY
*key
,
102 unsigned char *ivec
, int *num
, const int enc
)
104 CRYPTO_cfb128_1_encrypt(in
, out
, length
, key
, ivec
, num
, enc
,
105 (block128_f
) ossl_aria_encrypt
);
108 static void aria_cfb8_encrypt(const unsigned char *in
, unsigned char *out
,
109 size_t length
, const ARIA_KEY
*key
,
110 unsigned char *ivec
, int *num
, const int enc
)
112 CRYPTO_cfb128_8_encrypt(in
, out
, length
, key
, ivec
, num
, enc
,
113 (block128_f
) ossl_aria_encrypt
);
116 static void aria_ecb_encrypt(const unsigned char *in
, unsigned char *out
,
117 const ARIA_KEY
*key
, const int enc
)
119 ossl_aria_encrypt(in
, out
, key
);
122 static void aria_ofb128_encrypt(const unsigned char *in
, unsigned char *out
,
123 size_t length
, const ARIA_KEY
*key
,
124 unsigned char *ivec
, int *num
)
126 CRYPTO_ofb128_encrypt(in
, out
, length
, key
, ivec
, num
,
127 (block128_f
) ossl_aria_encrypt
);
130 IMPLEMENT_BLOCK_CIPHER(aria_128
, ks
, aria
, EVP_ARIA_KEY
,
131 NID_aria_128
, 16, 16, 16, 128,
132 0, aria_init_key
, NULL
,
133 EVP_CIPHER_set_asn1_iv
,
134 EVP_CIPHER_get_asn1_iv
,
136 IMPLEMENT_BLOCK_CIPHER(aria_192
, ks
, aria
, EVP_ARIA_KEY
,
137 NID_aria_192
, 16, 24, 16, 128,
138 0, aria_init_key
, NULL
,
139 EVP_CIPHER_set_asn1_iv
,
140 EVP_CIPHER_get_asn1_iv
,
142 IMPLEMENT_BLOCK_CIPHER(aria_256
, ks
, aria
, EVP_ARIA_KEY
,
143 NID_aria_256
, 16, 32, 16, 128,
144 0, aria_init_key
, NULL
,
145 EVP_CIPHER_set_asn1_iv
,
146 EVP_CIPHER_get_asn1_iv
,
149 # define IMPLEMENT_ARIA_CFBR(ksize,cbits) \
150 IMPLEMENT_CFBR(aria,aria,EVP_ARIA_KEY,ks,ksize,cbits,16,0)
151 IMPLEMENT_ARIA_CFBR(128,1)
152 IMPLEMENT_ARIA_CFBR(192,1)
153 IMPLEMENT_ARIA_CFBR(256,1)
154 IMPLEMENT_ARIA_CFBR(128,8)
155 IMPLEMENT_ARIA_CFBR(192,8)
156 IMPLEMENT_ARIA_CFBR(256,8)
/* Defines a static EVP_CIPHER table entry plus its accessor function for a
 * generic (non-AEAD) ARIA mode. */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aria_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        EVP_ORIG_GLOBAL,                \
        aria_init_key,                  \
        aria_##mode##_cipher,           \
        NULL,                           \
        sizeof(EVP_ARIA_KEY),           \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aria_##keylen##_##mode(void) \
{ return &aria_##keylen##_##mode; }
171 static int aria_ctr_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
172 const unsigned char *in
, size_t len
)
174 unsigned int num
= EVP_CIPHER_CTX_num(ctx
);
175 EVP_ARIA_KEY
*dat
= EVP_C_DATA(EVP_ARIA_KEY
, ctx
);
177 CRYPTO_ctr128_encrypt(in
, out
, len
, &dat
->ks
, ctx
->iv
,
178 EVP_CIPHER_CTX_buf_noconst(ctx
), &num
,
179 (block128_f
) ossl_aria_encrypt
);
180 EVP_CIPHER_CTX_set_num(ctx
, num
);
184 BLOCK_CIPHER_generic(NID_aria
, 128, 1, 16, ctr
, ctr
, CTR
, 0)
185 BLOCK_CIPHER_generic(NID_aria
, 192, 1, 16, ctr
, ctr
, CTR
, 0)
186 BLOCK_CIPHER_generic(NID_aria
, 256, 1, 16, ctr
, ctr
, CTR
, 0)
/* Authenticated cipher modes (GCM/CCM) */

/*
 * Increment the trailing 64-bit big-endian counter held in counter[0..7]
 * by 1, propagating the carry from the least significant byte.
 */
static void ctr64_inc(unsigned char *counter)
{
    int n = 8;
    unsigned char c;

    do {
        --n;
        c = counter[n];
        ++c;
        counter[n] = c;
        if (c)
            return;      /* no carry: done */
    } while (n);
}
206 static int aria_gcm_init_key(EVP_CIPHER_CTX
*ctx
, const unsigned char *key
,
207 const unsigned char *iv
, int enc
)
210 EVP_ARIA_GCM_CTX
*gctx
= EVP_C_DATA(EVP_ARIA_GCM_CTX
,ctx
);
215 ret
= ossl_aria_set_encrypt_key(key
, EVP_CIPHER_CTX_key_length(ctx
) * 8,
217 CRYPTO_gcm128_init(&gctx
->gcm
, &gctx
->ks
,
218 (block128_f
) ossl_aria_encrypt
);
220 ERR_raise(ERR_LIB_EVP
,EVP_R_ARIA_KEY_SETUP_FAILED
);
225 * If we have an iv can set it directly, otherwise use saved IV.
227 if (iv
== NULL
&& gctx
->iv_set
)
230 CRYPTO_gcm128_setiv(&gctx
->gcm
, iv
, gctx
->ivlen
);
235 /* If key set use IV, otherwise copy */
237 CRYPTO_gcm128_setiv(&gctx
->gcm
, iv
, gctx
->ivlen
);
239 memcpy(gctx
->iv
, iv
, gctx
->ivlen
);
246 static int aria_gcm_ctrl(EVP_CIPHER_CTX
*c
, int type
, int arg
, void *ptr
)
248 EVP_ARIA_GCM_CTX
*gctx
= EVP_C_DATA(EVP_ARIA_GCM_CTX
,c
);
254 gctx
->ivlen
= EVP_CIPHER_iv_length(c
->cipher
);
258 gctx
->tls_aad_len
= -1;
261 case EVP_CTRL_GET_IVLEN
:
262 *(int *)ptr
= gctx
->ivlen
;
265 case EVP_CTRL_AEAD_SET_IVLEN
:
268 /* Allocate memory for IV if needed */
269 if ((arg
> EVP_MAX_IV_LENGTH
) && (arg
> gctx
->ivlen
)) {
270 if (gctx
->iv
!= c
->iv
)
271 OPENSSL_free(gctx
->iv
);
272 if ((gctx
->iv
= OPENSSL_malloc(arg
)) == NULL
) {
273 ERR_raise(ERR_LIB_EVP
, ERR_R_MALLOC_FAILURE
);
280 case EVP_CTRL_AEAD_SET_TAG
:
281 if (arg
<= 0 || arg
> 16 || EVP_CIPHER_CTX_encrypting(c
))
283 memcpy(EVP_CIPHER_CTX_buf_noconst(c
), ptr
, arg
);
287 case EVP_CTRL_AEAD_GET_TAG
:
288 if (arg
<= 0 || arg
> 16 || !EVP_CIPHER_CTX_encrypting(c
)
291 memcpy(ptr
, EVP_CIPHER_CTX_buf_noconst(c
), arg
);
294 case EVP_CTRL_GCM_SET_IV_FIXED
:
295 /* Special case: -1 length restores whole IV */
297 memcpy(gctx
->iv
, ptr
, gctx
->ivlen
);
302 * Fixed field must be at least 4 bytes and invocation field at least
305 if ((arg
< 4) || (gctx
->ivlen
- arg
) < 8)
308 memcpy(gctx
->iv
, ptr
, arg
);
309 if (EVP_CIPHER_CTX_encrypting(c
)
310 && RAND_bytes(gctx
->iv
+ arg
, gctx
->ivlen
- arg
) <= 0)
315 case EVP_CTRL_GCM_IV_GEN
:
316 if (gctx
->iv_gen
== 0 || gctx
->key_set
== 0)
318 CRYPTO_gcm128_setiv(&gctx
->gcm
, gctx
->iv
, gctx
->ivlen
);
319 if (arg
<= 0 || arg
> gctx
->ivlen
)
321 memcpy(ptr
, gctx
->iv
+ gctx
->ivlen
- arg
, arg
);
323 * Invocation field will be at least 8 bytes in size and so no need
324 * to check wrap around or increment more than last 8 bytes.
326 ctr64_inc(gctx
->iv
+ gctx
->ivlen
- 8);
330 case EVP_CTRL_GCM_SET_IV_INV
:
331 if (gctx
->iv_gen
== 0 || gctx
->key_set
== 0
332 || EVP_CIPHER_CTX_encrypting(c
))
334 memcpy(gctx
->iv
+ gctx
->ivlen
- arg
, ptr
, arg
);
335 CRYPTO_gcm128_setiv(&gctx
->gcm
, gctx
->iv
, gctx
->ivlen
);
339 case EVP_CTRL_AEAD_TLS1_AAD
:
340 /* Save the AAD for later use */
341 if (arg
!= EVP_AEAD_TLS1_AAD_LEN
)
343 memcpy(EVP_CIPHER_CTX_buf_noconst(c
), ptr
, arg
);
344 gctx
->tls_aad_len
= arg
;
347 EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 2] << 8
348 | EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 1];
349 /* Correct length for explicit IV */
350 if (len
< EVP_GCM_TLS_EXPLICIT_IV_LEN
)
352 len
-= EVP_GCM_TLS_EXPLICIT_IV_LEN
;
353 /* If decrypting correct for tag too */
354 if (!EVP_CIPHER_CTX_encrypting(c
)) {
355 if (len
< EVP_GCM_TLS_TAG_LEN
)
357 len
-= EVP_GCM_TLS_TAG_LEN
;
359 EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 2] = len
>> 8;
360 EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 1] = len
& 0xff;
362 /* Extra padding: tag appended to record */
363 return EVP_GCM_TLS_TAG_LEN
;
367 EVP_CIPHER_CTX
*out
= ptr
;
368 EVP_ARIA_GCM_CTX
*gctx_out
= EVP_C_DATA(EVP_ARIA_GCM_CTX
,out
);
370 if (gctx
->gcm
.key
!= &gctx
->ks
)
372 gctx_out
->gcm
.key
= &gctx_out
->ks
;
374 if (gctx
->iv
== c
->iv
)
375 gctx_out
->iv
= out
->iv
;
377 if ((gctx_out
->iv
= OPENSSL_malloc(gctx
->ivlen
)) == NULL
) {
378 ERR_raise(ERR_LIB_EVP
, ERR_R_MALLOC_FAILURE
);
381 memcpy(gctx_out
->iv
, gctx
->iv
, gctx
->ivlen
);
392 static int aria_gcm_tls_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
393 const unsigned char *in
, size_t len
)
395 EVP_ARIA_GCM_CTX
*gctx
= EVP_C_DATA(EVP_ARIA_GCM_CTX
,ctx
);
398 /* Encrypt/decrypt must be performed in place */
400 || len
< (EVP_GCM_TLS_EXPLICIT_IV_LEN
+ EVP_GCM_TLS_TAG_LEN
))
403 * Set IV from start of buffer or generate IV and write to start of
406 if (EVP_CIPHER_CTX_ctrl(ctx
, EVP_CIPHER_CTX_encrypting(ctx
) ?
407 EVP_CTRL_GCM_IV_GEN
: EVP_CTRL_GCM_SET_IV_INV
,
408 EVP_GCM_TLS_EXPLICIT_IV_LEN
, out
) <= 0)
411 if (CRYPTO_gcm128_aad(&gctx
->gcm
, EVP_CIPHER_CTX_buf_noconst(ctx
),
414 /* Fix buffer and length to point to payload */
415 in
+= EVP_GCM_TLS_EXPLICIT_IV_LEN
;
416 out
+= EVP_GCM_TLS_EXPLICIT_IV_LEN
;
417 len
-= EVP_GCM_TLS_EXPLICIT_IV_LEN
+ EVP_GCM_TLS_TAG_LEN
;
418 if (EVP_CIPHER_CTX_encrypting(ctx
)) {
419 /* Encrypt payload */
420 if (CRYPTO_gcm128_encrypt(&gctx
->gcm
, in
, out
, len
))
423 /* Finally write tag */
424 CRYPTO_gcm128_tag(&gctx
->gcm
, out
, EVP_GCM_TLS_TAG_LEN
);
425 rv
= len
+ EVP_GCM_TLS_EXPLICIT_IV_LEN
+ EVP_GCM_TLS_TAG_LEN
;
428 if (CRYPTO_gcm128_decrypt(&gctx
->gcm
, in
, out
, len
))
431 CRYPTO_gcm128_tag(&gctx
->gcm
, EVP_CIPHER_CTX_buf_noconst(ctx
),
432 EVP_GCM_TLS_TAG_LEN
);
433 /* If tag mismatch wipe buffer */
434 if (CRYPTO_memcmp(EVP_CIPHER_CTX_buf_noconst(ctx
), in
+ len
,
435 EVP_GCM_TLS_TAG_LEN
)) {
436 OPENSSL_cleanse(out
, len
);
444 gctx
->tls_aad_len
= -1;
448 static int aria_gcm_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
449 const unsigned char *in
, size_t len
)
451 EVP_ARIA_GCM_CTX
*gctx
= EVP_C_DATA(EVP_ARIA_GCM_CTX
,ctx
);
453 /* If not set up, return error */
457 if (gctx
->tls_aad_len
>= 0)
458 return aria_gcm_tls_cipher(ctx
, out
, in
, len
);
464 if (CRYPTO_gcm128_aad(&gctx
->gcm
, in
, len
))
466 } else if (EVP_CIPHER_CTX_encrypting(ctx
)) {
467 if (CRYPTO_gcm128_encrypt(&gctx
->gcm
, in
, out
, len
))
470 if (CRYPTO_gcm128_decrypt(&gctx
->gcm
, in
, out
, len
))
475 if (!EVP_CIPHER_CTX_encrypting(ctx
)) {
476 if (gctx
->taglen
< 0)
478 if (CRYPTO_gcm128_finish(&gctx
->gcm
,
479 EVP_CIPHER_CTX_buf_noconst(ctx
),
485 CRYPTO_gcm128_tag(&gctx
->gcm
, EVP_CIPHER_CTX_buf_noconst(ctx
), 16);
487 /* Don't reuse the IV */
492 static int aria_gcm_cleanup(EVP_CIPHER_CTX
*ctx
)
494 EVP_ARIA_GCM_CTX
*gctx
= EVP_C_DATA(EVP_ARIA_GCM_CTX
, ctx
);
496 if (gctx
->iv
!= ctx
->iv
)
497 OPENSSL_free(gctx
->iv
);
502 static int aria_ccm_init_key(EVP_CIPHER_CTX
*ctx
, const unsigned char *key
,
503 const unsigned char *iv
, int enc
)
506 EVP_ARIA_CCM_CTX
*cctx
= EVP_C_DATA(EVP_ARIA_CCM_CTX
,ctx
);
512 ret
= ossl_aria_set_encrypt_key(key
, EVP_CIPHER_CTX_key_length(ctx
) * 8,
514 CRYPTO_ccm128_init(&cctx
->ccm
, cctx
->M
, cctx
->L
,
515 &cctx
->ks
, (block128_f
) ossl_aria_encrypt
);
517 ERR_raise(ERR_LIB_EVP
,EVP_R_ARIA_KEY_SETUP_FAILED
);
524 memcpy(ctx
->iv
, iv
, 15 - cctx
->L
);
530 static int aria_ccm_ctrl(EVP_CIPHER_CTX
*c
, int type
, int arg
, void *ptr
)
532 EVP_ARIA_CCM_CTX
*cctx
= EVP_C_DATA(EVP_ARIA_CCM_CTX
,c
);
542 cctx
->tls_aad_len
= -1;
545 case EVP_CTRL_GET_IVLEN
:
546 *(int *)ptr
= 15 - cctx
->L
;
549 case EVP_CTRL_AEAD_TLS1_AAD
:
550 /* Save the AAD for later use */
551 if (arg
!= EVP_AEAD_TLS1_AAD_LEN
)
553 memcpy(EVP_CIPHER_CTX_buf_noconst(c
), ptr
, arg
);
554 cctx
->tls_aad_len
= arg
;
557 EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 2] << 8
558 | EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 1];
559 /* Correct length for explicit IV */
560 if (len
< EVP_CCM_TLS_EXPLICIT_IV_LEN
)
562 len
-= EVP_CCM_TLS_EXPLICIT_IV_LEN
;
563 /* If decrypting correct for tag too */
564 if (!EVP_CIPHER_CTX_encrypting(c
)) {
569 EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 2] = len
>> 8;
570 EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 1] = len
& 0xff;
572 /* Extra padding: tag appended to record */
575 case EVP_CTRL_CCM_SET_IV_FIXED
:
576 /* Sanity check length */
577 if (arg
!= EVP_CCM_TLS_FIXED_IV_LEN
)
579 /* Just copy to first part of IV */
580 memcpy(c
->iv
, ptr
, arg
);
583 case EVP_CTRL_AEAD_SET_IVLEN
:
586 case EVP_CTRL_CCM_SET_L
:
587 if (arg
< 2 || arg
> 8)
591 case EVP_CTRL_AEAD_SET_TAG
:
592 if ((arg
& 1) || arg
< 4 || arg
> 16)
594 if (EVP_CIPHER_CTX_encrypting(c
) && ptr
)
598 memcpy(EVP_CIPHER_CTX_buf_noconst(c
), ptr
, arg
);
603 case EVP_CTRL_AEAD_GET_TAG
:
604 if (!EVP_CIPHER_CTX_encrypting(c
) || !cctx
->tag_set
)
606 if (!CRYPTO_ccm128_tag(&cctx
->ccm
, ptr
, (size_t)arg
))
615 EVP_CIPHER_CTX
*out
= ptr
;
616 EVP_ARIA_CCM_CTX
*cctx_out
= EVP_C_DATA(EVP_ARIA_CCM_CTX
,out
);
618 if (cctx
->ccm
.key
!= &cctx
->ks
)
620 cctx_out
->ccm
.key
= &cctx_out
->ks
;
630 static int aria_ccm_tls_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
631 const unsigned char *in
, size_t len
)
633 EVP_ARIA_CCM_CTX
*cctx
= EVP_C_DATA(EVP_ARIA_CCM_CTX
,ctx
);
634 CCM128_CONTEXT
*ccm
= &cctx
->ccm
;
636 /* Encrypt/decrypt must be performed in place */
637 if (out
!= in
|| len
< (EVP_CCM_TLS_EXPLICIT_IV_LEN
+ (size_t)cctx
->M
))
639 /* If encrypting set explicit IV from sequence number (start of AAD) */
640 if (EVP_CIPHER_CTX_encrypting(ctx
))
641 memcpy(out
, EVP_CIPHER_CTX_buf_noconst(ctx
),
642 EVP_CCM_TLS_EXPLICIT_IV_LEN
);
643 /* Get rest of IV from explicit IV */
644 memcpy(ctx
->iv
+ EVP_CCM_TLS_FIXED_IV_LEN
, in
,
645 EVP_CCM_TLS_EXPLICIT_IV_LEN
);
646 /* Correct length value */
647 len
-= EVP_CCM_TLS_EXPLICIT_IV_LEN
+ cctx
->M
;
648 if (CRYPTO_ccm128_setiv(ccm
, ctx
->iv
, 15 - cctx
->L
,
652 CRYPTO_ccm128_aad(ccm
, EVP_CIPHER_CTX_buf_noconst(ctx
), cctx
->tls_aad_len
);
653 /* Fix buffer to point to payload */
654 in
+= EVP_CCM_TLS_EXPLICIT_IV_LEN
;
655 out
+= EVP_CCM_TLS_EXPLICIT_IV_LEN
;
656 if (EVP_CIPHER_CTX_encrypting(ctx
)) {
657 if (cctx
->str
? CRYPTO_ccm128_encrypt_ccm64(ccm
, in
, out
, len
, cctx
->str
)
658 : CRYPTO_ccm128_encrypt(ccm
, in
, out
, len
))
660 if (!CRYPTO_ccm128_tag(ccm
, out
+ len
, cctx
->M
))
662 return len
+ EVP_CCM_TLS_EXPLICIT_IV_LEN
+ cctx
->M
;
664 if (cctx
->str
? !CRYPTO_ccm128_decrypt_ccm64(ccm
, in
, out
, len
, cctx
->str
)
665 : !CRYPTO_ccm128_decrypt(ccm
, in
, out
, len
)) {
666 unsigned char tag
[16];
667 if (CRYPTO_ccm128_tag(ccm
, tag
, cctx
->M
)) {
668 if (!CRYPTO_memcmp(tag
, in
+ len
, cctx
->M
))
672 OPENSSL_cleanse(out
, len
);
677 static int aria_ccm_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
678 const unsigned char *in
, size_t len
)
680 EVP_ARIA_CCM_CTX
*cctx
= EVP_C_DATA(EVP_ARIA_CCM_CTX
,ctx
);
681 CCM128_CONTEXT
*ccm
= &cctx
->ccm
;
683 /* If not set up, return error */
687 if (cctx
->tls_aad_len
>= 0)
688 return aria_ccm_tls_cipher(ctx
, out
, in
, len
);
690 /* EVP_*Final() doesn't return any data */
691 if (in
== NULL
&& out
!= NULL
)
699 if (CRYPTO_ccm128_setiv(ccm
, ctx
->iv
, 15 - cctx
->L
, len
))
704 /* If have AAD need message length */
705 if (!cctx
->len_set
&& len
)
707 CRYPTO_ccm128_aad(ccm
, in
, len
);
711 /* The tag must be set before actually decrypting data */
712 if (!EVP_CIPHER_CTX_encrypting(ctx
) && !cctx
->tag_set
)
715 /* If not set length yet do it */
716 if (!cctx
->len_set
) {
717 if (CRYPTO_ccm128_setiv(ccm
, ctx
->iv
, 15 - cctx
->L
, len
))
721 if (EVP_CIPHER_CTX_encrypting(ctx
)) {
722 if (cctx
->str
? CRYPTO_ccm128_encrypt_ccm64(ccm
, in
, out
, len
, cctx
->str
)
723 : CRYPTO_ccm128_encrypt(ccm
, in
, out
, len
))
729 if (cctx
->str
? !CRYPTO_ccm128_decrypt_ccm64(ccm
, in
, out
, len
,
731 !CRYPTO_ccm128_decrypt(ccm
, in
, out
, len
)) {
732 unsigned char tag
[16];
733 if (CRYPTO_ccm128_tag(ccm
, tag
, cctx
->M
)) {
734 if (!CRYPTO_memcmp(tag
, EVP_CIPHER_CTX_buf_noconst(ctx
),
740 OPENSSL_cleanse(out
, len
);
/* CCM needs no cleanup: its context holds no heap allocations. */
#define aria_ccm_cleanup NULL

/* Common EVP flags for the ARIA AEAD (GCM/CCM) ciphers. */
#define ARIA_AUTH_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
                         | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                         | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                         | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_FLAG_AEAD_CIPHER \
                         | EVP_CIPH_CUSTOM_IV_LENGTH)
/* Defines a static EVP_CIPHER table entry plus its accessor function for an
 * ARIA AEAD mode (GCM or CCM). */
#define BLOCK_CIPHER_aead(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aria_##keylen##_##mode = { \
        nid##_##keylen##_##nmode, \
        blocksize, keylen/8, ivlen, \
        ARIA_AUTH_FLAGS|EVP_CIPH_##MODE##_MODE, \
        EVP_ORIG_GLOBAL, \
        aria_##mode##_init_key, \
        aria_##mode##_cipher, \
        aria_##mode##_cleanup, \
        sizeof(EVP_ARIA_##MODE##_CTX), \
        NULL,NULL,aria_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aria_##keylen##_##mode(void) \
{ return (EVP_CIPHER*)&aria_##keylen##_##mode; }
770 BLOCK_CIPHER_aead(NID_aria
, 128, 1, 12, gcm
, gcm
, GCM
, 0)
771 BLOCK_CIPHER_aead(NID_aria
, 192, 1, 12, gcm
, gcm
, GCM
, 0)
772 BLOCK_CIPHER_aead(NID_aria
, 256, 1, 12, gcm
, gcm
, GCM
, 0)
774 BLOCK_CIPHER_aead(NID_aria
, 128, 1, 12, ccm
, ccm
, CCM
, 0)
775 BLOCK_CIPHER_aead(NID_aria
, 192, 1, 12, ccm
, ccm
, CCM
, 0)
776 BLOCK_CIPHER_aead(NID_aria
, 256, 1, 12, ccm
, ccm
, CCM
, 0)