/*
 * Copyright 2017-2021 The OpenSSL Project Authors. All Rights Reserved.
 * Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved.
 *
 * Licensed under the Apache License 2.0 (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */
#include "internal/cryptlib.h"
#ifndef OPENSSL_NO_ARIA
# include <openssl/evp.h>
# include <openssl/modes.h>
# include <openssl/rand.h>
# include "crypto/aria.h"
# include "crypto/evp.h"
# include "crypto/modes.h"
# include "evp_local.h"
21 /* ARIA subkey Structure */
26 /* ARIA GCM context */
31 } ks
; /* ARIA subkey to use */
32 int key_set
; /* Set if key initialised */
33 int iv_set
; /* Set if an iv is set */
35 unsigned char *iv
; /* Temporary IV store */
36 int ivlen
; /* IV length */
38 int iv_gen
; /* It is OK to generate IVs */
39 int tls_aad_len
; /* TLS AAD length */
42 /* ARIA CCM context */
47 } ks
; /* ARIA key schedule to use */
48 int key_set
; /* Set if key initialised */
49 int iv_set
; /* Set if an iv is set */
50 int tag_set
; /* Set if tag is valid */
51 int len_set
; /* Set if message length set */
52 int L
, M
; /* L and M parameters from RFC3610 */
53 int tls_aad_len
; /* TLS AAD length */
58 /* The subkey for ARIA is generated. */
59 static int aria_init_key(EVP_CIPHER_CTX
*ctx
, const unsigned char *key
,
60 const unsigned char *iv
, int enc
)
63 int mode
= EVP_CIPHER_CTX_mode(ctx
);
65 if (enc
|| (mode
!= EVP_CIPH_ECB_MODE
&& mode
!= EVP_CIPH_CBC_MODE
))
66 ret
= ossl_aria_set_encrypt_key(key
, EVP_CIPHER_CTX_key_length(ctx
) * 8,
67 EVP_CIPHER_CTX_get_cipher_data(ctx
));
69 ret
= ossl_aria_set_decrypt_key(key
, EVP_CIPHER_CTX_key_length(ctx
) * 8,
70 EVP_CIPHER_CTX_get_cipher_data(ctx
));
72 ERR_raise(ERR_LIB_EVP
,EVP_R_ARIA_KEY_SETUP_FAILED
);
78 static void aria_cbc_encrypt(const unsigned char *in
, unsigned char *out
,
79 size_t len
, const ARIA_KEY
*key
,
80 unsigned char *ivec
, const int enc
)
84 CRYPTO_cbc128_encrypt(in
, out
, len
, key
, ivec
,
85 (block128_f
) ossl_aria_encrypt
);
87 CRYPTO_cbc128_decrypt(in
, out
, len
, key
, ivec
,
88 (block128_f
) ossl_aria_encrypt
);
91 static void aria_cfb128_encrypt(const unsigned char *in
, unsigned char *out
,
92 size_t length
, const ARIA_KEY
*key
,
93 unsigned char *ivec
, int *num
, const int enc
)
96 CRYPTO_cfb128_encrypt(in
, out
, length
, key
, ivec
, num
, enc
,
97 (block128_f
) ossl_aria_encrypt
);
100 static void aria_cfb1_encrypt(const unsigned char *in
, unsigned char *out
,
101 size_t length
, const ARIA_KEY
*key
,
102 unsigned char *ivec
, int *num
, const int enc
)
104 CRYPTO_cfb128_1_encrypt(in
, out
, length
, key
, ivec
, num
, enc
,
105 (block128_f
) ossl_aria_encrypt
);
108 static void aria_cfb8_encrypt(const unsigned char *in
, unsigned char *out
,
109 size_t length
, const ARIA_KEY
*key
,
110 unsigned char *ivec
, int *num
, const int enc
)
112 CRYPTO_cfb128_8_encrypt(in
, out
, length
, key
, ivec
, num
, enc
,
113 (block128_f
) ossl_aria_encrypt
);
116 static void aria_ecb_encrypt(const unsigned char *in
, unsigned char *out
,
117 const ARIA_KEY
*key
, const int enc
)
119 ossl_aria_encrypt(in
, out
, key
);
122 static void aria_ofb128_encrypt(const unsigned char *in
, unsigned char *out
,
123 size_t length
, const ARIA_KEY
*key
,
124 unsigned char *ivec
, int *num
)
126 CRYPTO_ofb128_encrypt(in
, out
, length
, key
, ivec
, num
,
127 (block128_f
) ossl_aria_encrypt
);
130 IMPLEMENT_BLOCK_CIPHER(aria_128
, ks
, aria
, EVP_ARIA_KEY
,
131 NID_aria_128
, 16, 16, 16, 128,
132 0, aria_init_key
, NULL
,
133 EVP_CIPHER_set_asn1_iv
,
134 EVP_CIPHER_get_asn1_iv
,
136 IMPLEMENT_BLOCK_CIPHER(aria_192
, ks
, aria
, EVP_ARIA_KEY
,
137 NID_aria_192
, 16, 24, 16, 128,
138 0, aria_init_key
, NULL
,
139 EVP_CIPHER_set_asn1_iv
,
140 EVP_CIPHER_get_asn1_iv
,
142 IMPLEMENT_BLOCK_CIPHER(aria_256
, ks
, aria
, EVP_ARIA_KEY
,
143 NID_aria_256
, 16, 32, 16, 128,
144 0, aria_init_key
, NULL
,
145 EVP_CIPHER_set_asn1_iv
,
146 EVP_CIPHER_get_asn1_iv
,
149 # define IMPLEMENT_ARIA_CFBR(ksize,cbits) \
150 IMPLEMENT_CFBR(aria,aria,EVP_ARIA_KEY,ks,ksize,cbits,16,0)
151 IMPLEMENT_ARIA_CFBR(128,1)
152 IMPLEMENT_ARIA_CFBR(192,1)
153 IMPLEMENT_ARIA_CFBR(256,1)
154 IMPLEMENT_ARIA_CFBR(128,8)
155 IMPLEMENT_ARIA_CFBR(192,8)
156 IMPLEMENT_ARIA_CFBR(256,8)
/*
 * Defines a const EVP_CIPHER table plus its EVP_aria_<klen>_<mode>()
 * accessor for a non-AEAD mode.
 */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aria_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aria_init_key, \
        aria_##mode##_cipher, \
        NULL, \
        sizeof(EVP_ARIA_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aria_##keylen##_##mode(void) \
{ return &aria_##keylen##_##mode; }
170 static int aria_ctr_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
171 const unsigned char *in
, size_t len
)
173 unsigned int num
= EVP_CIPHER_CTX_num(ctx
);
174 EVP_ARIA_KEY
*dat
= EVP_C_DATA(EVP_ARIA_KEY
, ctx
);
176 CRYPTO_ctr128_encrypt(in
, out
, len
, &dat
->ks
, ctx
->iv
,
177 EVP_CIPHER_CTX_buf_noconst(ctx
), &num
,
178 (block128_f
) ossl_aria_encrypt
);
179 EVP_CIPHER_CTX_set_num(ctx
, num
);
183 BLOCK_CIPHER_generic(NID_aria
, 128, 1, 16, ctr
, ctr
, CTR
, 0)
184 BLOCK_CIPHER_generic(NID_aria
, 192, 1, 16, ctr
, ctr
, CTR
, 0)
185 BLOCK_CIPHER_generic(NID_aria
, 256, 1, 16, ctr
, ctr
, CTR
, 0)
/* Authenticated cipher modes (GCM/CCM) */

/*
 * increment counter (64-bit int) by 1
 * 'counter' points at the last 8 bytes of the IV, treated as a big-endian
 * 64-bit integer; increment wraps to zero at 2^64.
 */
static void ctr64_inc(unsigned char *counter)
{
    int n = 8;
    unsigned char c;

    do {
        --n;
        c = counter[n];
        ++c;
        counter[n] = c;
        if (c)                  /* no carry out of this byte: done */
            return;
    } while (n);
}
205 static int aria_gcm_init_key(EVP_CIPHER_CTX
*ctx
, const unsigned char *key
,
206 const unsigned char *iv
, int enc
)
209 EVP_ARIA_GCM_CTX
*gctx
= EVP_C_DATA(EVP_ARIA_GCM_CTX
,ctx
);
214 ret
= ossl_aria_set_encrypt_key(key
, EVP_CIPHER_CTX_key_length(ctx
) * 8,
216 CRYPTO_gcm128_init(&gctx
->gcm
, &gctx
->ks
,
217 (block128_f
) ossl_aria_encrypt
);
219 ERR_raise(ERR_LIB_EVP
,EVP_R_ARIA_KEY_SETUP_FAILED
);
224 * If we have an iv can set it directly, otherwise use saved IV.
226 if (iv
== NULL
&& gctx
->iv_set
)
229 CRYPTO_gcm128_setiv(&gctx
->gcm
, iv
, gctx
->ivlen
);
234 /* If key set use IV, otherwise copy */
236 CRYPTO_gcm128_setiv(&gctx
->gcm
, iv
, gctx
->ivlen
);
238 memcpy(gctx
->iv
, iv
, gctx
->ivlen
);
245 static int aria_gcm_ctrl(EVP_CIPHER_CTX
*c
, int type
, int arg
, void *ptr
)
247 EVP_ARIA_GCM_CTX
*gctx
= EVP_C_DATA(EVP_ARIA_GCM_CTX
,c
);
253 gctx
->ivlen
= EVP_CIPHER_iv_length(c
->cipher
);
257 gctx
->tls_aad_len
= -1;
260 case EVP_CTRL_GET_IVLEN
:
261 *(int *)ptr
= gctx
->ivlen
;
264 case EVP_CTRL_AEAD_SET_IVLEN
:
267 /* Allocate memory for IV if needed */
268 if ((arg
> EVP_MAX_IV_LENGTH
) && (arg
> gctx
->ivlen
)) {
269 if (gctx
->iv
!= c
->iv
)
270 OPENSSL_free(gctx
->iv
);
271 if ((gctx
->iv
= OPENSSL_malloc(arg
)) == NULL
) {
272 ERR_raise(ERR_LIB_EVP
, ERR_R_MALLOC_FAILURE
);
279 case EVP_CTRL_AEAD_SET_TAG
:
280 if (arg
<= 0 || arg
> 16 || EVP_CIPHER_CTX_encrypting(c
))
282 memcpy(EVP_CIPHER_CTX_buf_noconst(c
), ptr
, arg
);
286 case EVP_CTRL_AEAD_GET_TAG
:
287 if (arg
<= 0 || arg
> 16 || !EVP_CIPHER_CTX_encrypting(c
)
290 memcpy(ptr
, EVP_CIPHER_CTX_buf_noconst(c
), arg
);
293 case EVP_CTRL_GCM_SET_IV_FIXED
:
294 /* Special case: -1 length restores whole IV */
296 memcpy(gctx
->iv
, ptr
, gctx
->ivlen
);
301 * Fixed field must be at least 4 bytes and invocation field at least
304 if ((arg
< 4) || (gctx
->ivlen
- arg
) < 8)
307 memcpy(gctx
->iv
, ptr
, arg
);
308 if (EVP_CIPHER_CTX_encrypting(c
)
309 && RAND_bytes(gctx
->iv
+ arg
, gctx
->ivlen
- arg
) <= 0)
314 case EVP_CTRL_GCM_IV_GEN
:
315 if (gctx
->iv_gen
== 0 || gctx
->key_set
== 0)
317 CRYPTO_gcm128_setiv(&gctx
->gcm
, gctx
->iv
, gctx
->ivlen
);
318 if (arg
<= 0 || arg
> gctx
->ivlen
)
320 memcpy(ptr
, gctx
->iv
+ gctx
->ivlen
- arg
, arg
);
322 * Invocation field will be at least 8 bytes in size and so no need
323 * to check wrap around or increment more than last 8 bytes.
325 ctr64_inc(gctx
->iv
+ gctx
->ivlen
- 8);
329 case EVP_CTRL_GCM_SET_IV_INV
:
330 if (gctx
->iv_gen
== 0 || gctx
->key_set
== 0
331 || EVP_CIPHER_CTX_encrypting(c
))
333 memcpy(gctx
->iv
+ gctx
->ivlen
- arg
, ptr
, arg
);
334 CRYPTO_gcm128_setiv(&gctx
->gcm
, gctx
->iv
, gctx
->ivlen
);
338 case EVP_CTRL_AEAD_TLS1_AAD
:
339 /* Save the AAD for later use */
340 if (arg
!= EVP_AEAD_TLS1_AAD_LEN
)
342 memcpy(EVP_CIPHER_CTX_buf_noconst(c
), ptr
, arg
);
343 gctx
->tls_aad_len
= arg
;
346 EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 2] << 8
347 | EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 1];
348 /* Correct length for explicit IV */
349 if (len
< EVP_GCM_TLS_EXPLICIT_IV_LEN
)
351 len
-= EVP_GCM_TLS_EXPLICIT_IV_LEN
;
352 /* If decrypting correct for tag too */
353 if (!EVP_CIPHER_CTX_encrypting(c
)) {
354 if (len
< EVP_GCM_TLS_TAG_LEN
)
356 len
-= EVP_GCM_TLS_TAG_LEN
;
358 EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 2] = len
>> 8;
359 EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 1] = len
& 0xff;
361 /* Extra padding: tag appended to record */
362 return EVP_GCM_TLS_TAG_LEN
;
366 EVP_CIPHER_CTX
*out
= ptr
;
367 EVP_ARIA_GCM_CTX
*gctx_out
= EVP_C_DATA(EVP_ARIA_GCM_CTX
,out
);
369 if (gctx
->gcm
.key
!= &gctx
->ks
)
371 gctx_out
->gcm
.key
= &gctx_out
->ks
;
373 if (gctx
->iv
== c
->iv
)
374 gctx_out
->iv
= out
->iv
;
376 if ((gctx_out
->iv
= OPENSSL_malloc(gctx
->ivlen
)) == NULL
) {
377 ERR_raise(ERR_LIB_EVP
, ERR_R_MALLOC_FAILURE
);
380 memcpy(gctx_out
->iv
, gctx
->iv
, gctx
->ivlen
);
391 static int aria_gcm_tls_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
392 const unsigned char *in
, size_t len
)
394 EVP_ARIA_GCM_CTX
*gctx
= EVP_C_DATA(EVP_ARIA_GCM_CTX
,ctx
);
397 /* Encrypt/decrypt must be performed in place */
399 || len
< (EVP_GCM_TLS_EXPLICIT_IV_LEN
+ EVP_GCM_TLS_TAG_LEN
))
402 * Set IV from start of buffer or generate IV and write to start of
405 if (EVP_CIPHER_CTX_ctrl(ctx
, EVP_CIPHER_CTX_encrypting(ctx
) ?
406 EVP_CTRL_GCM_IV_GEN
: EVP_CTRL_GCM_SET_IV_INV
,
407 EVP_GCM_TLS_EXPLICIT_IV_LEN
, out
) <= 0)
410 if (CRYPTO_gcm128_aad(&gctx
->gcm
, EVP_CIPHER_CTX_buf_noconst(ctx
),
413 /* Fix buffer and length to point to payload */
414 in
+= EVP_GCM_TLS_EXPLICIT_IV_LEN
;
415 out
+= EVP_GCM_TLS_EXPLICIT_IV_LEN
;
416 len
-= EVP_GCM_TLS_EXPLICIT_IV_LEN
+ EVP_GCM_TLS_TAG_LEN
;
417 if (EVP_CIPHER_CTX_encrypting(ctx
)) {
418 /* Encrypt payload */
419 if (CRYPTO_gcm128_encrypt(&gctx
->gcm
, in
, out
, len
))
422 /* Finally write tag */
423 CRYPTO_gcm128_tag(&gctx
->gcm
, out
, EVP_GCM_TLS_TAG_LEN
);
424 rv
= len
+ EVP_GCM_TLS_EXPLICIT_IV_LEN
+ EVP_GCM_TLS_TAG_LEN
;
427 if (CRYPTO_gcm128_decrypt(&gctx
->gcm
, in
, out
, len
))
430 CRYPTO_gcm128_tag(&gctx
->gcm
, EVP_CIPHER_CTX_buf_noconst(ctx
),
431 EVP_GCM_TLS_TAG_LEN
);
432 /* If tag mismatch wipe buffer */
433 if (CRYPTO_memcmp(EVP_CIPHER_CTX_buf_noconst(ctx
), in
+ len
,
434 EVP_GCM_TLS_TAG_LEN
)) {
435 OPENSSL_cleanse(out
, len
);
443 gctx
->tls_aad_len
= -1;
447 static int aria_gcm_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
448 const unsigned char *in
, size_t len
)
450 EVP_ARIA_GCM_CTX
*gctx
= EVP_C_DATA(EVP_ARIA_GCM_CTX
,ctx
);
452 /* If not set up, return error */
456 if (gctx
->tls_aad_len
>= 0)
457 return aria_gcm_tls_cipher(ctx
, out
, in
, len
);
463 if (CRYPTO_gcm128_aad(&gctx
->gcm
, in
, len
))
465 } else if (EVP_CIPHER_CTX_encrypting(ctx
)) {
466 if (CRYPTO_gcm128_encrypt(&gctx
->gcm
, in
, out
, len
))
469 if (CRYPTO_gcm128_decrypt(&gctx
->gcm
, in
, out
, len
))
474 if (!EVP_CIPHER_CTX_encrypting(ctx
)) {
475 if (gctx
->taglen
< 0)
477 if (CRYPTO_gcm128_finish(&gctx
->gcm
,
478 EVP_CIPHER_CTX_buf_noconst(ctx
),
484 CRYPTO_gcm128_tag(&gctx
->gcm
, EVP_CIPHER_CTX_buf_noconst(ctx
), 16);
486 /* Don't reuse the IV */
491 static int aria_gcm_cleanup(EVP_CIPHER_CTX
*ctx
)
493 EVP_ARIA_GCM_CTX
*gctx
= EVP_C_DATA(EVP_ARIA_GCM_CTX
, ctx
);
495 if (gctx
->iv
!= ctx
->iv
)
496 OPENSSL_free(gctx
->iv
);
501 static int aria_ccm_init_key(EVP_CIPHER_CTX
*ctx
, const unsigned char *key
,
502 const unsigned char *iv
, int enc
)
505 EVP_ARIA_CCM_CTX
*cctx
= EVP_C_DATA(EVP_ARIA_CCM_CTX
,ctx
);
511 ret
= ossl_aria_set_encrypt_key(key
, EVP_CIPHER_CTX_key_length(ctx
) * 8,
513 CRYPTO_ccm128_init(&cctx
->ccm
, cctx
->M
, cctx
->L
,
514 &cctx
->ks
, (block128_f
) ossl_aria_encrypt
);
516 ERR_raise(ERR_LIB_EVP
,EVP_R_ARIA_KEY_SETUP_FAILED
);
523 memcpy(ctx
->iv
, iv
, 15 - cctx
->L
);
529 static int aria_ccm_ctrl(EVP_CIPHER_CTX
*c
, int type
, int arg
, void *ptr
)
531 EVP_ARIA_CCM_CTX
*cctx
= EVP_C_DATA(EVP_ARIA_CCM_CTX
,c
);
541 cctx
->tls_aad_len
= -1;
544 case EVP_CTRL_GET_IVLEN
:
545 *(int *)ptr
= 15 - cctx
->L
;
548 case EVP_CTRL_AEAD_TLS1_AAD
:
549 /* Save the AAD for later use */
550 if (arg
!= EVP_AEAD_TLS1_AAD_LEN
)
552 memcpy(EVP_CIPHER_CTX_buf_noconst(c
), ptr
, arg
);
553 cctx
->tls_aad_len
= arg
;
556 EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 2] << 8
557 | EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 1];
558 /* Correct length for explicit IV */
559 if (len
< EVP_CCM_TLS_EXPLICIT_IV_LEN
)
561 len
-= EVP_CCM_TLS_EXPLICIT_IV_LEN
;
562 /* If decrypting correct for tag too */
563 if (!EVP_CIPHER_CTX_encrypting(c
)) {
568 EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 2] = len
>> 8;
569 EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 1] = len
& 0xff;
571 /* Extra padding: tag appended to record */
574 case EVP_CTRL_CCM_SET_IV_FIXED
:
575 /* Sanity check length */
576 if (arg
!= EVP_CCM_TLS_FIXED_IV_LEN
)
578 /* Just copy to first part of IV */
579 memcpy(c
->iv
, ptr
, arg
);
582 case EVP_CTRL_AEAD_SET_IVLEN
:
585 case EVP_CTRL_CCM_SET_L
:
586 if (arg
< 2 || arg
> 8)
590 case EVP_CTRL_AEAD_SET_TAG
:
591 if ((arg
& 1) || arg
< 4 || arg
> 16)
593 if (EVP_CIPHER_CTX_encrypting(c
) && ptr
)
597 memcpy(EVP_CIPHER_CTX_buf_noconst(c
), ptr
, arg
);
602 case EVP_CTRL_AEAD_GET_TAG
:
603 if (!EVP_CIPHER_CTX_encrypting(c
) || !cctx
->tag_set
)
605 if (!CRYPTO_ccm128_tag(&cctx
->ccm
, ptr
, (size_t)arg
))
614 EVP_CIPHER_CTX
*out
= ptr
;
615 EVP_ARIA_CCM_CTX
*cctx_out
= EVP_C_DATA(EVP_ARIA_CCM_CTX
,out
);
617 if (cctx
->ccm
.key
!= &cctx
->ks
)
619 cctx_out
->ccm
.key
= &cctx_out
->ks
;
629 static int aria_ccm_tls_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
630 const unsigned char *in
, size_t len
)
632 EVP_ARIA_CCM_CTX
*cctx
= EVP_C_DATA(EVP_ARIA_CCM_CTX
,ctx
);
633 CCM128_CONTEXT
*ccm
= &cctx
->ccm
;
635 /* Encrypt/decrypt must be performed in place */
636 if (out
!= in
|| len
< (EVP_CCM_TLS_EXPLICIT_IV_LEN
+ (size_t)cctx
->M
))
638 /* If encrypting set explicit IV from sequence number (start of AAD) */
639 if (EVP_CIPHER_CTX_encrypting(ctx
))
640 memcpy(out
, EVP_CIPHER_CTX_buf_noconst(ctx
),
641 EVP_CCM_TLS_EXPLICIT_IV_LEN
);
642 /* Get rest of IV from explicit IV */
643 memcpy(ctx
->iv
+ EVP_CCM_TLS_FIXED_IV_LEN
, in
,
644 EVP_CCM_TLS_EXPLICIT_IV_LEN
);
645 /* Correct length value */
646 len
-= EVP_CCM_TLS_EXPLICIT_IV_LEN
+ cctx
->M
;
647 if (CRYPTO_ccm128_setiv(ccm
, ctx
->iv
, 15 - cctx
->L
,
651 CRYPTO_ccm128_aad(ccm
, EVP_CIPHER_CTX_buf_noconst(ctx
), cctx
->tls_aad_len
);
652 /* Fix buffer to point to payload */
653 in
+= EVP_CCM_TLS_EXPLICIT_IV_LEN
;
654 out
+= EVP_CCM_TLS_EXPLICIT_IV_LEN
;
655 if (EVP_CIPHER_CTX_encrypting(ctx
)) {
656 if (cctx
->str
? CRYPTO_ccm128_encrypt_ccm64(ccm
, in
, out
, len
, cctx
->str
)
657 : CRYPTO_ccm128_encrypt(ccm
, in
, out
, len
))
659 if (!CRYPTO_ccm128_tag(ccm
, out
+ len
, cctx
->M
))
661 return len
+ EVP_CCM_TLS_EXPLICIT_IV_LEN
+ cctx
->M
;
663 if (cctx
->str
? !CRYPTO_ccm128_decrypt_ccm64(ccm
, in
, out
, len
, cctx
->str
)
664 : !CRYPTO_ccm128_decrypt(ccm
, in
, out
, len
)) {
665 unsigned char tag
[16];
666 if (CRYPTO_ccm128_tag(ccm
, tag
, cctx
->M
)) {
667 if (!CRYPTO_memcmp(tag
, in
+ len
, cctx
->M
))
671 OPENSSL_cleanse(out
, len
);
676 static int aria_ccm_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
677 const unsigned char *in
, size_t len
)
679 EVP_ARIA_CCM_CTX
*cctx
= EVP_C_DATA(EVP_ARIA_CCM_CTX
,ctx
);
680 CCM128_CONTEXT
*ccm
= &cctx
->ccm
;
682 /* If not set up, return error */
686 if (cctx
->tls_aad_len
>= 0)
687 return aria_ccm_tls_cipher(ctx
, out
, in
, len
);
689 /* EVP_*Final() doesn't return any data */
690 if (in
== NULL
&& out
!= NULL
)
698 if (CRYPTO_ccm128_setiv(ccm
, ctx
->iv
, 15 - cctx
->L
, len
))
703 /* If have AAD need message length */
704 if (!cctx
->len_set
&& len
)
706 CRYPTO_ccm128_aad(ccm
, in
, len
);
710 /* The tag must be set before actually decrypting data */
711 if (!EVP_CIPHER_CTX_encrypting(ctx
) && !cctx
->tag_set
)
714 /* If not set length yet do it */
715 if (!cctx
->len_set
) {
716 if (CRYPTO_ccm128_setiv(ccm
, ctx
->iv
, 15 - cctx
->L
, len
))
720 if (EVP_CIPHER_CTX_encrypting(ctx
)) {
721 if (cctx
->str
? CRYPTO_ccm128_encrypt_ccm64(ccm
, in
, out
, len
, cctx
->str
)
722 : CRYPTO_ccm128_encrypt(ccm
, in
, out
, len
))
728 if (cctx
->str
? !CRYPTO_ccm128_decrypt_ccm64(ccm
, in
, out
, len
,
730 !CRYPTO_ccm128_decrypt(ccm
, in
, out
, len
)) {
731 unsigned char tag
[16];
732 if (CRYPTO_ccm128_tag(ccm
, tag
, cctx
->M
)) {
733 if (!CRYPTO_memcmp(tag
, EVP_CIPHER_CTX_buf_noconst(ctx
),
739 OPENSSL_cleanse(out
, len
);
/* CCM keeps no heap state, so no cleanup is needed */
#define aria_ccm_cleanup NULL

/* Common flags for the AEAD (GCM/CCM) ARIA ciphers */
#define ARIA_AUTH_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
                         | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                         | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                         | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_FLAG_AEAD_CIPHER \
                         | EVP_CIPH_CUSTOM_IV_LENGTH)

/*
 * Defines a const EVP_CIPHER table plus its EVP_aria_<klen>_<mode>()
 * accessor for an AEAD mode (per-mode init/cipher/cleanup/ctrl hooks).
 */
#define BLOCK_CIPHER_aead(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aria_##keylen##_##mode = { \
        nid##_##keylen##_##nmode, \
        blocksize, keylen/8, ivlen, \
        ARIA_AUTH_FLAGS|EVP_CIPH_##MODE##_MODE, \
        aria_##mode##_init_key, \
        aria_##mode##_cipher, \
        aria_##mode##_cleanup, \
        sizeof(EVP_ARIA_##MODE##_CTX), \
        NULL,NULL,aria_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aria_##keylen##_##mode(void) \
{ return (EVP_CIPHER*)&aria_##keylen##_##mode; }
768 BLOCK_CIPHER_aead(NID_aria
, 128, 1, 12, gcm
, gcm
, GCM
, 0)
769 BLOCK_CIPHER_aead(NID_aria
, 192, 1, 12, gcm
, gcm
, GCM
, 0)
770 BLOCK_CIPHER_aead(NID_aria
, 256, 1, 12, gcm
, gcm
, GCM
, 0)
772 BLOCK_CIPHER_aead(NID_aria
, 128, 1, 12, ccm
, ccm
, CCM
, 0)
773 BLOCK_CIPHER_aead(NID_aria
, 192, 1, 12, ccm
, ccm
, CCM
, 0)
774 BLOCK_CIPHER_aead(NID_aria
, 256, 1, 12, ccm
, ccm
, CCM
, 0)