/*
 * Copyright 2017-2021 The OpenSSL Project Authors. All Rights Reserved.
 * Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved.
 *
 * Licensed under the Apache License 2.0 (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */
11 #include "internal/deprecated.h"
13 #include "internal/cryptlib.h"
14 #ifndef OPENSSL_NO_ARIA
15 # include <openssl/evp.h>
16 # include <openssl/modes.h>
17 # include <openssl/rand.h>
18 # include "crypto/aria.h"
19 # include "crypto/evp.h"
20 # include "crypto/modes.h"
21 # include "evp_local.h"
23 /* ARIA subkey Structure */
28 /* ARIA GCM context */
33 } ks
; /* ARIA subkey to use */
34 int key_set
; /* Set if key initialised */
35 int iv_set
; /* Set if an iv is set */
37 unsigned char *iv
; /* Temporary IV store */
38 int ivlen
; /* IV length */
40 int iv_gen
; /* It is OK to generate IVs */
41 int tls_aad_len
; /* TLS AAD length */
44 /* ARIA CCM context */
49 } ks
; /* ARIA key schedule to use */
50 int key_set
; /* Set if key initialised */
51 int iv_set
; /* Set if an iv is set */
52 int tag_set
; /* Set if tag is valid */
53 int len_set
; /* Set if message length set */
54 int L
, M
; /* L and M parameters from RFC3610 */
55 int tls_aad_len
; /* TLS AAD length */
60 /* The subkey for ARIA is generated. */
61 static int aria_init_key(EVP_CIPHER_CTX
*ctx
, const unsigned char *key
,
62 const unsigned char *iv
, int enc
)
65 int mode
= EVP_CIPHER_CTX_get_mode(ctx
);
67 if (enc
|| (mode
!= EVP_CIPH_ECB_MODE
&& mode
!= EVP_CIPH_CBC_MODE
))
68 ret
= ossl_aria_set_encrypt_key(key
,
69 EVP_CIPHER_CTX_get_key_length(ctx
) * 8,
70 EVP_CIPHER_CTX_get_cipher_data(ctx
));
72 ret
= ossl_aria_set_decrypt_key(key
,
73 EVP_CIPHER_CTX_get_key_length(ctx
) * 8,
74 EVP_CIPHER_CTX_get_cipher_data(ctx
));
76 ERR_raise(ERR_LIB_EVP
, EVP_R_ARIA_KEY_SETUP_FAILED
);
82 static void aria_cbc_encrypt(const unsigned char *in
, unsigned char *out
,
83 size_t len
, const ARIA_KEY
*key
,
84 unsigned char *ivec
, const int enc
)
88 CRYPTO_cbc128_encrypt(in
, out
, len
, key
, ivec
,
89 (block128_f
) ossl_aria_encrypt
);
91 CRYPTO_cbc128_decrypt(in
, out
, len
, key
, ivec
,
92 (block128_f
) ossl_aria_encrypt
);
95 static void aria_cfb128_encrypt(const unsigned char *in
, unsigned char *out
,
96 size_t length
, const ARIA_KEY
*key
,
97 unsigned char *ivec
, int *num
, const int enc
)
100 CRYPTO_cfb128_encrypt(in
, out
, length
, key
, ivec
, num
, enc
,
101 (block128_f
) ossl_aria_encrypt
);
104 static void aria_cfb1_encrypt(const unsigned char *in
, unsigned char *out
,
105 size_t length
, const ARIA_KEY
*key
,
106 unsigned char *ivec
, int *num
, const int enc
)
108 CRYPTO_cfb128_1_encrypt(in
, out
, length
, key
, ivec
, num
, enc
,
109 (block128_f
) ossl_aria_encrypt
);
112 static void aria_cfb8_encrypt(const unsigned char *in
, unsigned char *out
,
113 size_t length
, const ARIA_KEY
*key
,
114 unsigned char *ivec
, int *num
, const int enc
)
116 CRYPTO_cfb128_8_encrypt(in
, out
, length
, key
, ivec
, num
, enc
,
117 (block128_f
) ossl_aria_encrypt
);
120 static void aria_ecb_encrypt(const unsigned char *in
, unsigned char *out
,
121 const ARIA_KEY
*key
, const int enc
)
123 ossl_aria_encrypt(in
, out
, key
);
126 static void aria_ofb128_encrypt(const unsigned char *in
, unsigned char *out
,
127 size_t length
, const ARIA_KEY
*key
,
128 unsigned char *ivec
, int *num
)
130 CRYPTO_ofb128_encrypt(in
, out
, length
, key
, ivec
, num
,
131 (block128_f
) ossl_aria_encrypt
);
134 IMPLEMENT_BLOCK_CIPHER(aria_128
, ks
, aria
, EVP_ARIA_KEY
,
135 NID_aria_128
, 16, 16, 16, 128,
136 0, aria_init_key
, NULL
,
137 EVP_CIPHER_set_asn1_iv
,
138 EVP_CIPHER_get_asn1_iv
,
140 IMPLEMENT_BLOCK_CIPHER(aria_192
, ks
, aria
, EVP_ARIA_KEY
,
141 NID_aria_192
, 16, 24, 16, 128,
142 0, aria_init_key
, NULL
,
143 EVP_CIPHER_set_asn1_iv
,
144 EVP_CIPHER_get_asn1_iv
,
146 IMPLEMENT_BLOCK_CIPHER(aria_256
, ks
, aria
, EVP_ARIA_KEY
,
147 NID_aria_256
, 16, 32, 16, 128,
148 0, aria_init_key
, NULL
,
149 EVP_CIPHER_set_asn1_iv
,
150 EVP_CIPHER_get_asn1_iv
,
153 # define IMPLEMENT_ARIA_CFBR(ksize,cbits) \
154 IMPLEMENT_CFBR(aria,aria,EVP_ARIA_KEY,ks,ksize,cbits,16,0)
155 IMPLEMENT_ARIA_CFBR(128,1)
156 IMPLEMENT_ARIA_CFBR(192,1)
157 IMPLEMENT_ARIA_CFBR(256,1)
158 IMPLEMENT_ARIA_CFBR(128,8)
159 IMPLEMENT_ARIA_CFBR(192,8)
160 IMPLEMENT_ARIA_CFBR(256,8)
/* Generic EVP_CIPHER definition for the stream-like modes (CTR). */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aria_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        EVP_ORIG_GLOBAL,                \
        aria_init_key,                  \
        aria_##mode##_cipher,           \
        NULL,                           \
        sizeof(EVP_ARIA_KEY),           \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aria_##keylen##_##mode(void) \
{ return &aria_##keylen##_##mode; }
175 static int aria_ctr_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
176 const unsigned char *in
, size_t len
)
178 int n
= EVP_CIPHER_CTX_get_num(ctx
);
180 EVP_ARIA_KEY
*dat
= EVP_C_DATA(EVP_ARIA_KEY
, ctx
);
184 num
= (unsigned int)n
;
186 CRYPTO_ctr128_encrypt(in
, out
, len
, &dat
->ks
, ctx
->iv
,
187 EVP_CIPHER_CTX_buf_noconst(ctx
), &num
,
188 (block128_f
) ossl_aria_encrypt
);
189 EVP_CIPHER_CTX_set_num(ctx
, num
);
193 BLOCK_CIPHER_generic(NID_aria
, 128, 1, 16, ctr
, ctr
, CTR
, 0)
194 BLOCK_CIPHER_generic(NID_aria
, 192, 1, 16, ctr
, ctr
, CTR
, 0)
195 BLOCK_CIPHER_generic(NID_aria
, 256, 1, 16, ctr
, ctr
, CTR
, 0)
/* Authenticated cipher modes (GCM/CCM) */

/*
 * Increment a big-endian 64-bit counter by 1, stopping the carry
 * propagation as soon as a byte does not wrap to zero.
 */
/* increment counter (64-bit int) by 1 */
static void ctr64_inc(unsigned char *counter)
{
    int n = 8;
    unsigned char c;

    do {
        --n;
        c = counter[n];
        ++c;
        counter[n] = c;
        if (c)
            return;             /* no carry: done */
    } while (n);
}
215 static int aria_gcm_init_key(EVP_CIPHER_CTX
*ctx
, const unsigned char *key
,
216 const unsigned char *iv
, int enc
)
219 EVP_ARIA_GCM_CTX
*gctx
= EVP_C_DATA(EVP_ARIA_GCM_CTX
, ctx
);
224 ret
= ossl_aria_set_encrypt_key(key
,
225 EVP_CIPHER_CTX_get_key_length(ctx
) * 8,
227 CRYPTO_gcm128_init(&gctx
->gcm
, &gctx
->ks
,
228 (block128_f
) ossl_aria_encrypt
);
230 ERR_raise(ERR_LIB_EVP
, EVP_R_ARIA_KEY_SETUP_FAILED
);
235 * If we have an iv can set it directly, otherwise use saved IV.
237 if (iv
== NULL
&& gctx
->iv_set
)
240 CRYPTO_gcm128_setiv(&gctx
->gcm
, iv
, gctx
->ivlen
);
245 /* If key set use IV, otherwise copy */
247 CRYPTO_gcm128_setiv(&gctx
->gcm
, iv
, gctx
->ivlen
);
249 memcpy(gctx
->iv
, iv
, gctx
->ivlen
);
256 static int aria_gcm_ctrl(EVP_CIPHER_CTX
*c
, int type
, int arg
, void *ptr
)
258 EVP_ARIA_GCM_CTX
*gctx
= EVP_C_DATA(EVP_ARIA_GCM_CTX
, c
);
264 gctx
->ivlen
= EVP_CIPHER_get_iv_length(c
->cipher
);
268 gctx
->tls_aad_len
= -1;
271 case EVP_CTRL_GET_IVLEN
:
272 *(int *)ptr
= gctx
->ivlen
;
275 case EVP_CTRL_AEAD_SET_IVLEN
:
278 /* Allocate memory for IV if needed */
279 if ((arg
> EVP_MAX_IV_LENGTH
) && (arg
> gctx
->ivlen
)) {
280 if (gctx
->iv
!= c
->iv
)
281 OPENSSL_free(gctx
->iv
);
282 if ((gctx
->iv
= OPENSSL_malloc(arg
)) == NULL
)
288 case EVP_CTRL_AEAD_SET_TAG
:
289 if (arg
<= 0 || arg
> 16 || EVP_CIPHER_CTX_is_encrypting(c
))
291 memcpy(EVP_CIPHER_CTX_buf_noconst(c
), ptr
, arg
);
295 case EVP_CTRL_AEAD_GET_TAG
:
296 if (arg
<= 0 || arg
> 16 || !EVP_CIPHER_CTX_is_encrypting(c
)
299 memcpy(ptr
, EVP_CIPHER_CTX_buf_noconst(c
), arg
);
302 case EVP_CTRL_GCM_SET_IV_FIXED
:
303 /* Special case: -1 length restores whole IV */
305 memcpy(gctx
->iv
, ptr
, gctx
->ivlen
);
310 * Fixed field must be at least 4 bytes and invocation field at least
313 if ((arg
< 4) || (gctx
->ivlen
- arg
) < 8)
316 memcpy(gctx
->iv
, ptr
, arg
);
317 if (EVP_CIPHER_CTX_is_encrypting(c
)
318 && RAND_bytes(gctx
->iv
+ arg
, gctx
->ivlen
- arg
) <= 0)
323 case EVP_CTRL_GCM_IV_GEN
:
324 if (gctx
->iv_gen
== 0 || gctx
->key_set
== 0)
326 CRYPTO_gcm128_setiv(&gctx
->gcm
, gctx
->iv
, gctx
->ivlen
);
327 if (arg
<= 0 || arg
> gctx
->ivlen
)
329 memcpy(ptr
, gctx
->iv
+ gctx
->ivlen
- arg
, arg
);
331 * Invocation field will be at least 8 bytes in size and so no need
332 * to check wrap around or increment more than last 8 bytes.
334 ctr64_inc(gctx
->iv
+ gctx
->ivlen
- 8);
338 case EVP_CTRL_GCM_SET_IV_INV
:
339 if (gctx
->iv_gen
== 0 || gctx
->key_set
== 0
340 || EVP_CIPHER_CTX_is_encrypting(c
))
342 memcpy(gctx
->iv
+ gctx
->ivlen
- arg
, ptr
, arg
);
343 CRYPTO_gcm128_setiv(&gctx
->gcm
, gctx
->iv
, gctx
->ivlen
);
347 case EVP_CTRL_AEAD_TLS1_AAD
:
348 /* Save the AAD for later use */
349 if (arg
!= EVP_AEAD_TLS1_AAD_LEN
)
351 memcpy(EVP_CIPHER_CTX_buf_noconst(c
), ptr
, arg
);
352 gctx
->tls_aad_len
= arg
;
355 EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 2] << 8
356 | EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 1];
357 /* Correct length for explicit IV */
358 if (len
< EVP_GCM_TLS_EXPLICIT_IV_LEN
)
360 len
-= EVP_GCM_TLS_EXPLICIT_IV_LEN
;
361 /* If decrypting correct for tag too */
362 if (!EVP_CIPHER_CTX_is_encrypting(c
)) {
363 if (len
< EVP_GCM_TLS_TAG_LEN
)
365 len
-= EVP_GCM_TLS_TAG_LEN
;
367 EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 2] = len
>> 8;
368 EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 1] = len
& 0xff;
370 /* Extra padding: tag appended to record */
371 return EVP_GCM_TLS_TAG_LEN
;
375 EVP_CIPHER_CTX
*out
= ptr
;
376 EVP_ARIA_GCM_CTX
*gctx_out
= EVP_C_DATA(EVP_ARIA_GCM_CTX
, out
);
378 if (gctx
->gcm
.key
!= &gctx
->ks
)
380 gctx_out
->gcm
.key
= &gctx_out
->ks
;
382 if (gctx
->iv
== c
->iv
)
383 gctx_out
->iv
= out
->iv
;
385 if ((gctx_out
->iv
= OPENSSL_malloc(gctx
->ivlen
)) == NULL
)
387 memcpy(gctx_out
->iv
, gctx
->iv
, gctx
->ivlen
);
398 static int aria_gcm_tls_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
399 const unsigned char *in
, size_t len
)
401 EVP_ARIA_GCM_CTX
*gctx
= EVP_C_DATA(EVP_ARIA_GCM_CTX
, ctx
);
404 /* Encrypt/decrypt must be performed in place */
406 || len
< (EVP_GCM_TLS_EXPLICIT_IV_LEN
+ EVP_GCM_TLS_TAG_LEN
))
409 * Set IV from start of buffer or generate IV and write to start of
412 if (EVP_CIPHER_CTX_ctrl(ctx
, EVP_CIPHER_CTX_is_encrypting(ctx
) ?
413 EVP_CTRL_GCM_IV_GEN
: EVP_CTRL_GCM_SET_IV_INV
,
414 EVP_GCM_TLS_EXPLICIT_IV_LEN
, out
) <= 0)
417 if (CRYPTO_gcm128_aad(&gctx
->gcm
, EVP_CIPHER_CTX_buf_noconst(ctx
),
420 /* Fix buffer and length to point to payload */
421 in
+= EVP_GCM_TLS_EXPLICIT_IV_LEN
;
422 out
+= EVP_GCM_TLS_EXPLICIT_IV_LEN
;
423 len
-= EVP_GCM_TLS_EXPLICIT_IV_LEN
+ EVP_GCM_TLS_TAG_LEN
;
424 if (EVP_CIPHER_CTX_is_encrypting(ctx
)) {
425 /* Encrypt payload */
426 if (CRYPTO_gcm128_encrypt(&gctx
->gcm
, in
, out
, len
))
429 /* Finally write tag */
430 CRYPTO_gcm128_tag(&gctx
->gcm
, out
, EVP_GCM_TLS_TAG_LEN
);
431 rv
= len
+ EVP_GCM_TLS_EXPLICIT_IV_LEN
+ EVP_GCM_TLS_TAG_LEN
;
434 if (CRYPTO_gcm128_decrypt(&gctx
->gcm
, in
, out
, len
))
437 CRYPTO_gcm128_tag(&gctx
->gcm
, EVP_CIPHER_CTX_buf_noconst(ctx
),
438 EVP_GCM_TLS_TAG_LEN
);
439 /* If tag mismatch wipe buffer */
440 if (CRYPTO_memcmp(EVP_CIPHER_CTX_buf_noconst(ctx
), in
+ len
,
441 EVP_GCM_TLS_TAG_LEN
)) {
442 OPENSSL_cleanse(out
, len
);
450 gctx
->tls_aad_len
= -1;
454 static int aria_gcm_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
455 const unsigned char *in
, size_t len
)
457 EVP_ARIA_GCM_CTX
*gctx
= EVP_C_DATA(EVP_ARIA_GCM_CTX
, ctx
);
459 /* If not set up, return error */
463 if (gctx
->tls_aad_len
>= 0)
464 return aria_gcm_tls_cipher(ctx
, out
, in
, len
);
470 if (CRYPTO_gcm128_aad(&gctx
->gcm
, in
, len
))
472 } else if (EVP_CIPHER_CTX_is_encrypting(ctx
)) {
473 if (CRYPTO_gcm128_encrypt(&gctx
->gcm
, in
, out
, len
))
476 if (CRYPTO_gcm128_decrypt(&gctx
->gcm
, in
, out
, len
))
481 if (!EVP_CIPHER_CTX_is_encrypting(ctx
)) {
482 if (gctx
->taglen
< 0)
484 if (CRYPTO_gcm128_finish(&gctx
->gcm
,
485 EVP_CIPHER_CTX_buf_noconst(ctx
),
491 CRYPTO_gcm128_tag(&gctx
->gcm
, EVP_CIPHER_CTX_buf_noconst(ctx
), 16);
493 /* Don't reuse the IV */
498 static int aria_gcm_cleanup(EVP_CIPHER_CTX
*ctx
)
500 EVP_ARIA_GCM_CTX
*gctx
= EVP_C_DATA(EVP_ARIA_GCM_CTX
, ctx
);
502 if (gctx
->iv
!= ctx
->iv
)
503 OPENSSL_free(gctx
->iv
);
508 static int aria_ccm_init_key(EVP_CIPHER_CTX
*ctx
, const unsigned char *key
,
509 const unsigned char *iv
, int enc
)
512 EVP_ARIA_CCM_CTX
*cctx
= EVP_C_DATA(EVP_ARIA_CCM_CTX
, ctx
);
518 ret
= ossl_aria_set_encrypt_key(key
,
519 EVP_CIPHER_CTX_get_key_length(ctx
) * 8,
521 CRYPTO_ccm128_init(&cctx
->ccm
, cctx
->M
, cctx
->L
,
522 &cctx
->ks
, (block128_f
) ossl_aria_encrypt
);
524 ERR_raise(ERR_LIB_EVP
, EVP_R_ARIA_KEY_SETUP_FAILED
);
531 memcpy(ctx
->iv
, iv
, 15 - cctx
->L
);
537 static int aria_ccm_ctrl(EVP_CIPHER_CTX
*c
, int type
, int arg
, void *ptr
)
539 EVP_ARIA_CCM_CTX
*cctx
= EVP_C_DATA(EVP_ARIA_CCM_CTX
, c
);
549 cctx
->tls_aad_len
= -1;
552 case EVP_CTRL_GET_IVLEN
:
553 *(int *)ptr
= 15 - cctx
->L
;
556 case EVP_CTRL_AEAD_TLS1_AAD
:
557 /* Save the AAD for later use */
558 if (arg
!= EVP_AEAD_TLS1_AAD_LEN
)
560 memcpy(EVP_CIPHER_CTX_buf_noconst(c
), ptr
, arg
);
561 cctx
->tls_aad_len
= arg
;
564 EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 2] << 8
565 | EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 1];
566 /* Correct length for explicit IV */
567 if (len
< EVP_CCM_TLS_EXPLICIT_IV_LEN
)
569 len
-= EVP_CCM_TLS_EXPLICIT_IV_LEN
;
570 /* If decrypting correct for tag too */
571 if (!EVP_CIPHER_CTX_is_encrypting(c
)) {
576 EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 2] = len
>> 8;
577 EVP_CIPHER_CTX_buf_noconst(c
)[arg
- 1] = len
& 0xff;
579 /* Extra padding: tag appended to record */
582 case EVP_CTRL_CCM_SET_IV_FIXED
:
583 /* Sanity check length */
584 if (arg
!= EVP_CCM_TLS_FIXED_IV_LEN
)
586 /* Just copy to first part of IV */
587 memcpy(c
->iv
, ptr
, arg
);
590 case EVP_CTRL_AEAD_SET_IVLEN
:
593 case EVP_CTRL_CCM_SET_L
:
594 if (arg
< 2 || arg
> 8)
598 case EVP_CTRL_AEAD_SET_TAG
:
599 if ((arg
& 1) || arg
< 4 || arg
> 16)
601 if (EVP_CIPHER_CTX_is_encrypting(c
) && ptr
)
605 memcpy(EVP_CIPHER_CTX_buf_noconst(c
), ptr
, arg
);
610 case EVP_CTRL_AEAD_GET_TAG
:
611 if (!EVP_CIPHER_CTX_is_encrypting(c
) || !cctx
->tag_set
)
613 if (!CRYPTO_ccm128_tag(&cctx
->ccm
, ptr
, (size_t)arg
))
622 EVP_CIPHER_CTX
*out
= ptr
;
623 EVP_ARIA_CCM_CTX
*cctx_out
= EVP_C_DATA(EVP_ARIA_CCM_CTX
, out
);
625 if (cctx
->ccm
.key
!= &cctx
->ks
)
627 cctx_out
->ccm
.key
= &cctx_out
->ks
;
637 static int aria_ccm_tls_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
638 const unsigned char *in
, size_t len
)
640 EVP_ARIA_CCM_CTX
*cctx
= EVP_C_DATA(EVP_ARIA_CCM_CTX
, ctx
);
641 CCM128_CONTEXT
*ccm
= &cctx
->ccm
;
643 /* Encrypt/decrypt must be performed in place */
644 if (out
!= in
|| len
< (EVP_CCM_TLS_EXPLICIT_IV_LEN
+ (size_t)cctx
->M
))
646 /* If encrypting set explicit IV from sequence number (start of AAD) */
647 if (EVP_CIPHER_CTX_is_encrypting(ctx
))
648 memcpy(out
, EVP_CIPHER_CTX_buf_noconst(ctx
),
649 EVP_CCM_TLS_EXPLICIT_IV_LEN
);
650 /* Get rest of IV from explicit IV */
651 memcpy(ctx
->iv
+ EVP_CCM_TLS_FIXED_IV_LEN
, in
,
652 EVP_CCM_TLS_EXPLICIT_IV_LEN
);
653 /* Correct length value */
654 len
-= EVP_CCM_TLS_EXPLICIT_IV_LEN
+ cctx
->M
;
655 if (CRYPTO_ccm128_setiv(ccm
, ctx
->iv
, 15 - cctx
->L
,
659 CRYPTO_ccm128_aad(ccm
, EVP_CIPHER_CTX_buf_noconst(ctx
),
661 /* Fix buffer to point to payload */
662 in
+= EVP_CCM_TLS_EXPLICIT_IV_LEN
;
663 out
+= EVP_CCM_TLS_EXPLICIT_IV_LEN
;
664 if (EVP_CIPHER_CTX_is_encrypting(ctx
)) {
665 if (cctx
->str
? CRYPTO_ccm128_encrypt_ccm64(ccm
, in
, out
, len
, cctx
->str
)
666 : CRYPTO_ccm128_encrypt(ccm
, in
, out
, len
))
668 if (!CRYPTO_ccm128_tag(ccm
, out
+ len
, cctx
->M
))
670 return len
+ EVP_CCM_TLS_EXPLICIT_IV_LEN
+ cctx
->M
;
672 if (cctx
->str
? !CRYPTO_ccm128_decrypt_ccm64(ccm
, in
, out
, len
, cctx
->str
)
673 : !CRYPTO_ccm128_decrypt(ccm
, in
, out
, len
)) {
674 unsigned char tag
[16];
675 if (CRYPTO_ccm128_tag(ccm
, tag
, cctx
->M
)) {
676 if (!CRYPTO_memcmp(tag
, in
+ len
, cctx
->M
))
680 OPENSSL_cleanse(out
, len
);
685 static int aria_ccm_cipher(EVP_CIPHER_CTX
*ctx
, unsigned char *out
,
686 const unsigned char *in
, size_t len
)
688 EVP_ARIA_CCM_CTX
*cctx
= EVP_C_DATA(EVP_ARIA_CCM_CTX
, ctx
);
689 CCM128_CONTEXT
*ccm
= &cctx
->ccm
;
691 /* If not set up, return error */
695 if (cctx
->tls_aad_len
>= 0)
696 return aria_ccm_tls_cipher(ctx
, out
, in
, len
);
698 /* EVP_*Final() doesn't return any data */
699 if (in
== NULL
&& out
!= NULL
)
707 if (CRYPTO_ccm128_setiv(ccm
, ctx
->iv
, 15 - cctx
->L
, len
))
712 /* If have AAD need message length */
713 if (!cctx
->len_set
&& len
)
715 CRYPTO_ccm128_aad(ccm
, in
, len
);
719 /* The tag must be set before actually decrypting data */
720 if (!EVP_CIPHER_CTX_is_encrypting(ctx
) && !cctx
->tag_set
)
723 /* If not set length yet do it */
724 if (!cctx
->len_set
) {
725 if (CRYPTO_ccm128_setiv(ccm
, ctx
->iv
, 15 - cctx
->L
, len
))
729 if (EVP_CIPHER_CTX_is_encrypting(ctx
)) {
730 if (cctx
->str
? CRYPTO_ccm128_encrypt_ccm64(ccm
, in
, out
, len
, cctx
->str
)
731 : CRYPTO_ccm128_encrypt(ccm
, in
, out
, len
))
737 if (cctx
->str
? !CRYPTO_ccm128_decrypt_ccm64(ccm
, in
, out
, len
,
739 !CRYPTO_ccm128_decrypt(ccm
, in
, out
, len
)) {
740 unsigned char tag
[16];
741 if (CRYPTO_ccm128_tag(ccm
, tag
, cctx
->M
)) {
742 if (!CRYPTO_memcmp(tag
, EVP_CIPHER_CTX_buf_noconst(ctx
),
748 OPENSSL_cleanse(out
, len
);
/* CCM has no separately-allocated state, so no cleanup is required. */
#define aria_ccm_cleanup NULL

#define ARIA_AUTH_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
                         | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                         | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                         | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_FLAG_AEAD_CIPHER \
                         | EVP_CIPH_CUSTOM_IV_LENGTH)
/* EVP_CIPHER definition template for the AEAD (GCM/CCM) modes. */
#define BLOCK_CIPHER_aead(keylen,mode,MODE)        \
static const EVP_CIPHER aria_##keylen##_##mode = { \
        NID_aria_##keylen##_##mode,                \
        1, keylen/8, 12,                           \
        ARIA_AUTH_FLAGS|EVP_CIPH_##MODE##_MODE,    \
        EVP_ORIG_GLOBAL,                           \
        aria_##mode##_init_key,                    \
        aria_##mode##_cipher,                      \
        aria_##mode##_cleanup,                     \
        sizeof(EVP_ARIA_##MODE##_CTX),             \
        NULL,NULL,aria_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aria_##keylen##_##mode(void) \
{ return (EVP_CIPHER*)&aria_##keylen##_##mode; }
778 BLOCK_CIPHER_aead(128, gcm
, GCM
)
779 BLOCK_CIPHER_aead(192, gcm
, GCM
)
780 BLOCK_CIPHER_aead(256, gcm
, GCM
)
782 BLOCK_CIPHER_aead(128, ccm
, CCM
)
783 BLOCK_CIPHER_aead(192, ccm
, CCM
)
784 BLOCK_CIPHER_aead(256, ccm
, CCM
)