/*
 * Copyright 2017-2021 The OpenSSL Project Authors. All Rights Reserved.
 * Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved.
 *
 * Licensed under the Apache License 2.0 (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

#include "internal/cryptlib.h"
#ifndef OPENSSL_NO_ARIA
# include <openssl/evp.h>
# include <openssl/modes.h>
# include <openssl/rand.h>
# include "crypto/aria.h"
# include "crypto/evp.h"
# include "crypto/modes.h"
# include "evp_local.h"

/* ARIA subkey structure */
typedef struct {
    ARIA_KEY ks;
} EVP_ARIA_KEY;

/* ARIA GCM context */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        ARIA_KEY ks;
    } ks;                       /* ARIA subkey to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    GCM128_CONTEXT gcm;
    unsigned char *iv;          /* Temporary IV store */
    int ivlen;                  /* IV length */
    int taglen;
    int iv_gen;                 /* It is OK to generate IVs */
    int tls_aad_len;            /* TLS AAD length */
} EVP_ARIA_GCM_CTX;

/* ARIA CCM context */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        ARIA_KEY ks;
    } ks;                       /* ARIA key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    int tag_set;                /* Set if tag is valid */
    int len_set;                /* Set if message length set */
    int L, M;                   /* L and M parameters from RFC 3610 */
    int tls_aad_len;            /* TLS AAD length */
    CCM128_CONTEXT ccm;
    ccm128_f str;
} EVP_ARIA_CCM_CTX;

/*
 * Generate the ARIA key schedule. Only ECB and CBC decryption need the
 * decrypt schedule; all other modes always use the encrypt schedule.
 */
static int aria_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                         const unsigned char *iv, int enc)
{
    int ret;
    int mode = EVP_CIPHER_CTX_mode(ctx);

    if (enc || (mode != EVP_CIPH_ECB_MODE && mode != EVP_CIPH_CBC_MODE))
        ret = ossl_aria_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                        EVP_CIPHER_CTX_get_cipher_data(ctx));
    else
        ret = ossl_aria_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                        EVP_CIPHER_CTX_get_cipher_data(ctx));
    if (ret < 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_ARIA_KEY_SETUP_FAILED);
        return 0;
    }
    return 1;
}

static void aria_cbc_encrypt(const unsigned char *in, unsigned char *out,
                             size_t len, const ARIA_KEY *key,
                             unsigned char *ivec, const int enc)
{
    if (enc)
        CRYPTO_cbc128_encrypt(in, out, len, key, ivec,
                              (block128_f) ossl_aria_encrypt);
    else
        CRYPTO_cbc128_decrypt(in, out, len, key, ivec,
                              (block128_f) ossl_aria_encrypt);
}

static void aria_cfb128_encrypt(const unsigned char *in, unsigned char *out,
                                size_t length, const ARIA_KEY *key,
                                unsigned char *ivec, int *num, const int enc)
{
    CRYPTO_cfb128_encrypt(in, out, length, key, ivec, num, enc,
                          (block128_f) ossl_aria_encrypt);
}

static void aria_cfb1_encrypt(const unsigned char *in, unsigned char *out,
                              size_t length, const ARIA_KEY *key,
                              unsigned char *ivec, int *num, const int enc)
{
    CRYPTO_cfb128_1_encrypt(in, out, length, key, ivec, num, enc,
                            (block128_f) ossl_aria_encrypt);
}

static void aria_cfb8_encrypt(const unsigned char *in, unsigned char *out,
                              size_t length, const ARIA_KEY *key,
                              unsigned char *ivec, int *num, const int enc)
{
    CRYPTO_cfb128_8_encrypt(in, out, length, key, ivec, num, enc,
                            (block128_f) ossl_aria_encrypt);
}

static void aria_ecb_encrypt(const unsigned char *in, unsigned char *out,
                             const ARIA_KEY *key, const int enc)
{
    ossl_aria_encrypt(in, out, key);
}

static void aria_ofb128_encrypt(const unsigned char *in, unsigned char *out,
                                size_t length, const ARIA_KEY *key,
                                unsigned char *ivec, int *num)
{
    CRYPTO_ofb128_encrypt(in, out, length, key, ivec, num,
                          (block128_f) ossl_aria_encrypt);
}

IMPLEMENT_BLOCK_CIPHER(aria_128, ks, aria, EVP_ARIA_KEY,
                       NID_aria_128, 16, 16, 16, 128,
                       0, aria_init_key, NULL,
                       EVP_CIPHER_set_asn1_iv,
                       EVP_CIPHER_get_asn1_iv,
                       NULL)
IMPLEMENT_BLOCK_CIPHER(aria_192, ks, aria, EVP_ARIA_KEY,
                       NID_aria_192, 16, 24, 16, 128,
                       0, aria_init_key, NULL,
                       EVP_CIPHER_set_asn1_iv,
                       EVP_CIPHER_get_asn1_iv,
                       NULL)
IMPLEMENT_BLOCK_CIPHER(aria_256, ks, aria, EVP_ARIA_KEY,
                       NID_aria_256, 16, 32, 16, 128,
                       0, aria_init_key, NULL,
                       EVP_CIPHER_set_asn1_iv,
                       EVP_CIPHER_get_asn1_iv,
                       NULL)
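
/*
 * Each IMPLEMENT_BLOCK_CIPHER invocation above expands to the EVP_CIPHER
 * tables and the EVP_aria_<bits>_ecb/cbc/cfb128/ofb() getters for that key
 * size (16, 24 and 32 byte keys, 16 byte block and IV).
 *
 * Illustrative one-shot CBC use through the public EVP interface (a sketch
 * only; the key, iv, plaintext and output buffers are assumed to be
 * provided by the caller):
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     int outl = 0, tmpl = 0;
 *
 *     EVP_EncryptInit_ex(c, EVP_aria_128_cbc(), NULL, key, iv);
 *     EVP_EncryptUpdate(c, out, &outl, plaintext, plaintext_len);
 *     EVP_EncryptFinal_ex(c, out + outl, &tmpl);
 *     EVP_CIPHER_CTX_free(c);
 */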

# define IMPLEMENT_ARIA_CFBR(ksize,cbits) \
                IMPLEMENT_CFBR(aria,aria,EVP_ARIA_KEY,ks,ksize,cbits,16,0)
IMPLEMENT_ARIA_CFBR(128,1)
IMPLEMENT_ARIA_CFBR(192,1)
IMPLEMENT_ARIA_CFBR(256,1)
IMPLEMENT_ARIA_CFBR(128,8)
IMPLEMENT_ARIA_CFBR(192,8)
IMPLEMENT_ARIA_CFBR(256,8)
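
/*
 * The IMPLEMENT_ARIA_CFBR invocations above add the 1-bit and 8-bit cipher
 * feedback variants, reachable as EVP_aria_<bits>_cfb1() and
 * EVP_aria_<bits>_cfb8().
 */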

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aria_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aria_init_key, \
        aria_##mode##_cipher, \
        NULL, \
        sizeof(EVP_ARIA_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aria_##keylen##_##mode(void) \
{ return &aria_##keylen##_##mode; }

static int aria_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    unsigned int num = EVP_CIPHER_CTX_num(ctx);
    EVP_ARIA_KEY *dat = EVP_C_DATA(EVP_ARIA_KEY, ctx);

    CRYPTO_ctr128_encrypt(in, out, len, &dat->ks, ctx->iv,
                          EVP_CIPHER_CTX_buf_noconst(ctx), &num,
                          (block128_f) ossl_aria_encrypt);
    EVP_CIPHER_CTX_set_num(ctx, num);
    return 1;
}

BLOCK_CIPHER_generic(NID_aria, 128, 1, 16, ctr, ctr, CTR, 0)
BLOCK_CIPHER_generic(NID_aria, 192, 1, 16, ctr, ctr, CTR, 0)
BLOCK_CIPHER_generic(NID_aria, 256, 1, 16, ctr, ctr, CTR, 0)
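
/*
 * The CTR instantiations above provide EVP_aria_128/192/256_ctr(); the
 * block size of 1 lets the EVP layer handle counter mode like a stream
 * cipher (no padding, arbitrary input lengths).
 */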

/* Authenticated cipher modes (GCM/CCM) */

/* Increment the 64-bit big-endian counter by 1 */
static void ctr64_inc(unsigned char *counter)
{
    int n = 8;
    unsigned char c;

    do {
        --n;
        c = counter[n];
        ++c;
        counter[n] = c;
        if (c)
            return;
    } while (n);
}

static int aria_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                             const unsigned char *iv, int enc)
{
    int ret;
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX, ctx);

    if (!iv && !key)
        return 1;
    if (key) {
        ret = ossl_aria_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                        &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                           (block128_f) ossl_aria_encrypt);
        if (ret < 0) {
            ERR_raise(ERR_LIB_EVP, EVP_R_ARIA_KEY_SETUP_FAILED);
            return 0;
        }

        /*
         * If we have an IV we can set it directly, otherwise use the
         * saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If the key is already set use the IV directly, otherwise save a copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

static int aria_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX, c);

    switch (type) {
    case EVP_CTRL_INIT:
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = EVP_CIPHER_iv_length(c->cipher);
        gctx->iv = c->iv;
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = gctx->ivlen;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        if (arg <= 0)
            return 0;
        /* Allocate memory for IV if needed */
        if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
            if (gctx->iv != c->iv)
                OPENSSL_free(gctx->iv);
            if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
                ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
                return 0;
            }
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if (arg <= 0 || arg > 16 || EVP_CIPHER_CTX_encrypting(c))
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (arg <= 0 || arg > 16 || !EVP_CIPHER_CTX_encrypting(c)
            || gctx->taglen < 0)
            return 0;
        memcpy(ptr, EVP_CIPHER_CTX_buf_noconst(c), arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole IV */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;
        if (arg)
            memcpy(gctx->iv, ptr, arg);
        if (EVP_CIPHER_CTX_encrypting(c)
            && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;
        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        if (gctx->iv_gen == 0 || gctx->key_set == 0
            || EVP_CIPHER_CTX_encrypting(c))
            return 0;
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        gctx->tls_aad_len = arg;
        {
            unsigned int len =
                EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!EVP_CIPHER_CTX_encrypting(c)) {
                if (len < EVP_GCM_TLS_TAG_LEN)
                    return 0;
                len -= EVP_GCM_TLS_TAG_LEN;
            }
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_ARIA_GCM_CTX *gctx_out = EVP_C_DATA(EVP_ARIA_GCM_CTX, out);
            if (gctx->gcm.key) {
                if (gctx->gcm.key != &gctx->ks)
                    return 0;
                gctx_out->gcm.key = &gctx_out->ks;
            }
            if (gctx->iv == c->iv)
                gctx_out->iv = out->iv;
            else {
                if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
                    ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
                    return 0;
                }
                memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
            }
            return 1;
        }

    default:
        return -1;

    }
}
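
/*
 * EVP_CTRL_GCM_IV_GEN and EVP_CTRL_GCM_SET_IV_INV above are the hooks the
 * TLS record code uses to manage the explicit part of the nonce; the
 * resulting IV is consumed by aria_gcm_tls_cipher() below.
 */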

static int aria_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                               const unsigned char *in, size_t len)
{
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX, ctx);
    int rv = -1;

    /* Encrypt/decrypt must be performed in place */
    if (out != in
        || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;
    /*
     * Set IV from start of buffer or generate IV and write to start of
     * buffer.
     */
    if (EVP_CIPHER_CTX_ctrl(ctx, EVP_CIPHER_CTX_encrypting(ctx) ?
                            EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;
    /* Use saved AAD */
    if (CRYPTO_gcm128_aad(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
                          gctx->tls_aad_len))
        goto err;
    /* Fix buffer and length to point to payload */
    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        /* Encrypt payload */
        if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, len))
            goto err;
        out += len;
        /* Finally write tag */
        CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        /* Decrypt */
        if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, len))
            goto err;
        /* Retrieve tag */
        CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
                          EVP_GCM_TLS_TAG_LEN);
        /* If tag mismatch wipe buffer */
        if (CRYPTO_memcmp(EVP_CIPHER_CTX_buf_noconst(ctx), in + len,
                          EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }

 err:
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}

static int aria_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX, ctx);

    /* If not set up, return error */
    if (!gctx->key_set)
        return -1;

    if (gctx->tls_aad_len >= 0)
        return aria_gcm_tls_cipher(ctx, out, in, len);

    if (!gctx->iv_set)
        return -1;
    if (in) {
        if (out == NULL) {
            if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
                return -1;
        } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
            if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, len))
                return -1;
        } else {
            if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, len))
                return -1;
        }
        return len;
    }
    if (!EVP_CIPHER_CTX_encrypting(ctx)) {
        if (gctx->taglen < 0)
            return -1;
        if (CRYPTO_gcm128_finish(&gctx->gcm,
                                 EVP_CIPHER_CTX_buf_noconst(ctx),
                                 gctx->taglen) != 0)
            return -1;
        gctx->iv_set = 0;
        return 0;
    }
    CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx), 16);
    gctx->taglen = 16;
    /* Don't reuse the IV */
    gctx->iv_set = 0;
    return 0;
}

static int aria_gcm_cleanup(EVP_CIPHER_CTX *ctx)
{
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX, ctx);

    if (gctx->iv != ctx->iv)
        OPENSSL_free(gctx->iv);

    return 1;
}
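
/*
 * Illustrative AEAD (GCM) use through the public EVP interface (a sketch
 * only; the key, iv, aad, plaintext and output buffers are assumed to be
 * provided by the caller):
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     int outl = 0, tmpl = 0;
 *     unsigned char tag[16];
 *
 *     EVP_EncryptInit_ex(c, EVP_aria_256_gcm(), NULL, NULL, NULL);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL);
 *     EVP_EncryptInit_ex(c, NULL, NULL, key, iv);
 *     EVP_EncryptUpdate(c, NULL, &outl, aad, aad_len);   <- AAD pass
 *     EVP_EncryptUpdate(c, out, &outl, plaintext, plaintext_len);
 *     EVP_EncryptFinal_ex(c, out + outl, &tmpl);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, sizeof(tag), tag);
 *     EVP_CIPHER_CTX_free(c);
 */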

static int aria_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                             const unsigned char *iv, int enc)
{
    int ret;
    EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX, ctx);

    if (!iv && !key)
        return 1;

    if (key) {
        ret = ossl_aria_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                        &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) ossl_aria_encrypt);
        if (ret < 0) {
            ERR_raise(ERR_LIB_EVP, EVP_R_ARIA_KEY_SETUP_FAILED);
            return 0;
        }
        cctx->str = NULL;
        cctx->key_set = 1;
    }
    if (iv) {
        memcpy(ctx->iv, iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}

static int aria_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX, c);

    switch (type) {
    case EVP_CTRL_INIT:
        cctx->key_set = 0;
        cctx->iv_set = 0;
        cctx->L = 8;
        cctx->M = 12;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        cctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = 15 - cctx->L;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        cctx->tls_aad_len = arg;
        {
            uint16_t len =
                EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!EVP_CIPHER_CTX_encrypting(c)) {
                if (len < cctx->M)
                    return 0;
                len -= cctx->M;
            }
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return cctx->M;

    case EVP_CTRL_CCM_SET_IV_FIXED:
        /* Sanity check length */
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
            return 0;
        /* Just copy to first part of IV */
        memcpy(c->iv, ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        arg = 15 - arg;
        /* fall through */
    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
            return 0;
        cctx->L = arg;
        return 1;
    case EVP_CTRL_AEAD_SET_TAG:
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;
        if (EVP_CIPHER_CTX_encrypting(c) && ptr)
            return 0;
        if (ptr) {
            cctx->tag_set = 1;
            memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        }
        cctx->M = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
            return 0;
        if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
            return 0;
        cctx->tag_set = 0;
        cctx->iv_set = 0;
        cctx->len_set = 0;
        return 1;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_ARIA_CCM_CTX *cctx_out = EVP_C_DATA(EVP_ARIA_CCM_CTX, out);
            if (cctx->ccm.key) {
                if (cctx->ccm.key != &cctx->ks)
                    return 0;
                cctx_out->ccm.key = &cctx_out->ks;
            }
            return 1;
        }

    default:
        return -1;
    }
}

static int aria_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                               const unsigned char *in, size_t len)
{
    EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX, ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;

    /* Encrypt/decrypt must be performed in place */
    if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
        return -1;
    /* If encrypting set explicit IV from sequence number (start of AAD) */
    if (EVP_CIPHER_CTX_encrypting(ctx))
        memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
               EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Get rest of IV from explicit IV */
    memcpy(ctx->iv + EVP_CCM_TLS_FIXED_IV_LEN, in,
           EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Correct length value */
    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
        return -1;
    /* Use saved AAD */
    CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
    /* Fix buffer to point to payload */
    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len, cctx->str)
            : CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
            return -1;
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    } else {
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len, cctx->str)
            : !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, in + len, cctx->M))
                    return len;
            }
        }
        OPENSSL_cleanse(out, len);
        return -1;
    }
}

static int aria_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX, ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;

    /* If not set up, return error */
    if (!cctx->key_set)
        return -1;

    if (cctx->tls_aad_len >= 0)
        return aria_ccm_tls_cipher(ctx, out, in, len);

    /* EVP_*Final() doesn't return any data */
    if (in == NULL && out != NULL)
        return 0;

    if (!cctx->iv_set)
        return -1;

    if (!out) {
        if (!in) {
            if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
                return -1;
            cctx->len_set = 1;
            return len;
        }
        /* If we have AAD, the total message length must have been set first */
        if (!cctx->len_set && len)
            return -1;
        CRYPTO_ccm128_aad(ccm, in, len);
        return len;
    }

    /* The tag must be set before actually decrypting data */
    if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
        return -1;

    /* If the length has not been set yet, do it now */
    if (!cctx->len_set) {
        if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
            return -1;
        cctx->len_set = 1;
    }
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len, cctx->str)
            : CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        cctx->tag_set = 1;
        return len;
    } else {
        int rv = -1;
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
                                   cctx->M))
                    rv = len;
            }
        }
        if (rv == -1)
            OPENSSL_cleanse(out, len);
        cctx->iv_set = 0;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        return rv;
    }
}
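
/*
 * Note on call ordering for CCM through the EVP interface: because CCM
 * encodes the message length into its initial block, the total plaintext
 * length must be supplied first (an EVP_EncryptUpdate()/EVP_DecryptUpdate()
 * call with both input and output set to NULL), before any AAD is passed,
 * as the length/AAD handling in aria_ccm_cipher() above requires.
 */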

#define aria_ccm_cleanup NULL

#define ARIA_AUTH_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
                         | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                         | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                         | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_FLAG_AEAD_CIPHER \
                         | EVP_CIPH_CUSTOM_IV_LENGTH)

#define BLOCK_CIPHER_aead(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aria_##keylen##_##mode = { \
        nid##_##keylen##_##nmode, \
        blocksize, keylen/8, ivlen, \
        ARIA_AUTH_FLAGS|EVP_CIPH_##MODE##_MODE, \
        aria_##mode##_init_key, \
        aria_##mode##_cipher, \
        aria_##mode##_cleanup, \
        sizeof(EVP_ARIA_##MODE##_CTX), \
        NULL,NULL,aria_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aria_##keylen##_##mode(void) \
{ return (EVP_CIPHER*)&aria_##keylen##_##mode; }

BLOCK_CIPHER_aead(NID_aria, 128, 1, 12, gcm, gcm, GCM, 0)
BLOCK_CIPHER_aead(NID_aria, 192, 1, 12, gcm, gcm, GCM, 0)
BLOCK_CIPHER_aead(NID_aria, 256, 1, 12, gcm, gcm, GCM, 0)

BLOCK_CIPHER_aead(NID_aria, 128, 1, 12, ccm, ccm, CCM, 0)
BLOCK_CIPHER_aead(NID_aria, 192, 1, 12, ccm, ccm, CCM, 0)
BLOCK_CIPHER_aead(NID_aria, 256, 1, 12, ccm, ccm, CCM, 0)

#endif