/*
 * crypto/evp/e_aria.c — from thirdparty/openssl.git
 * (snapshot at commit: Add "origin" field to EVP_CIPHER, EVP_MD)
 */
1 /*
2 * Copyright 2017-2021 The OpenSSL Project Authors. All Rights Reserved.
3 * Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved.
4 *
5 * Licensed under the Apache License 2.0 (the "License"). You may not use
6 * this file except in compliance with the License. You can obtain a copy
7 * in the file LICENSE in the source distribution or at
8 * https://www.openssl.org/source/license.html
9 */
10
11 #include "internal/cryptlib.h"
12 #ifndef OPENSSL_NO_ARIA
13 # include <openssl/evp.h>
14 # include <openssl/modes.h>
15 # include <openssl/rand.h>
16 # include "crypto/aria.h"
17 # include "crypto/evp.h"
18 # include "crypto/modes.h"
19 # include "evp_local.h"
20
/* ARIA subkey Structure: per-context state for the plain block modes */
typedef struct {
    ARIA_KEY ks;                /* expanded ARIA key schedule */
} EVP_ARIA_KEY;
25
/* ARIA GCM context */
typedef struct {
    union {
        OSSL_UNION_ALIGN;       /* force alignment suitable for the key */
        ARIA_KEY ks;
    } ks;                       /* ARIA subkey to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    GCM128_CONTEXT gcm;         /* low-level GCM state */
    unsigned char *iv;          /* Temporary IV store */
    int ivlen;                  /* IV length */
    int taglen;                 /* tag length, -1 until known */
    int iv_gen;                 /* It is OK to generate IVs */
    int tls_aad_len;            /* TLS AAD length, -1 when not in TLS mode */
} EVP_ARIA_GCM_CTX;
41
/* ARIA CCM context */
typedef struct {
    union {
        OSSL_UNION_ALIGN;       /* force alignment suitable for the key */
        ARIA_KEY ks;
    } ks;                       /* ARIA key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    int tag_set;                /* Set if tag is valid */
    int len_set;                /* Set if message length set */
    int L, M;                   /* L and M parameters from RFC3610 */
    int tls_aad_len;            /* TLS AAD length, -1 when not in TLS mode */
    CCM128_CONTEXT ccm;         /* low-level CCM state */
    ccm128_f str;               /* accelerated bulk hook; always NULL here
                                 * (cleared in aria_ccm_init_key) */
} EVP_ARIA_CCM_CTX;
57
58 /* The subkey for ARIA is generated. */
59 static int aria_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
60 const unsigned char *iv, int enc)
61 {
62 int ret;
63 int mode = EVP_CIPHER_CTX_mode(ctx);
64
65 if (enc || (mode != EVP_CIPH_ECB_MODE && mode != EVP_CIPH_CBC_MODE))
66 ret = ossl_aria_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
67 EVP_CIPHER_CTX_get_cipher_data(ctx));
68 else
69 ret = ossl_aria_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
70 EVP_CIPHER_CTX_get_cipher_data(ctx));
71 if (ret < 0) {
72 ERR_raise(ERR_LIB_EVP,EVP_R_ARIA_KEY_SETUP_FAILED);
73 return 0;
74 }
75 return 1;
76 }
77
78 static void aria_cbc_encrypt(const unsigned char *in, unsigned char *out,
79 size_t len, const ARIA_KEY *key,
80 unsigned char *ivec, const int enc)
81 {
82
83 if (enc)
84 CRYPTO_cbc128_encrypt(in, out, len, key, ivec,
85 (block128_f) ossl_aria_encrypt);
86 else
87 CRYPTO_cbc128_decrypt(in, out, len, key, ivec,
88 (block128_f) ossl_aria_encrypt);
89 }
90
91 static void aria_cfb128_encrypt(const unsigned char *in, unsigned char *out,
92 size_t length, const ARIA_KEY *key,
93 unsigned char *ivec, int *num, const int enc)
94 {
95
96 CRYPTO_cfb128_encrypt(in, out, length, key, ivec, num, enc,
97 (block128_f) ossl_aria_encrypt);
98 }
99
100 static void aria_cfb1_encrypt(const unsigned char *in, unsigned char *out,
101 size_t length, const ARIA_KEY *key,
102 unsigned char *ivec, int *num, const int enc)
103 {
104 CRYPTO_cfb128_1_encrypt(in, out, length, key, ivec, num, enc,
105 (block128_f) ossl_aria_encrypt);
106 }
107
108 static void aria_cfb8_encrypt(const unsigned char *in, unsigned char *out,
109 size_t length, const ARIA_KEY *key,
110 unsigned char *ivec, int *num, const int enc)
111 {
112 CRYPTO_cfb128_8_encrypt(in, out, length, key, ivec, num, enc,
113 (block128_f) ossl_aria_encrypt);
114 }
115
116 static void aria_ecb_encrypt(const unsigned char *in, unsigned char *out,
117 const ARIA_KEY *key, const int enc)
118 {
119 ossl_aria_encrypt(in, out, key);
120 }
121
122 static void aria_ofb128_encrypt(const unsigned char *in, unsigned char *out,
123 size_t length, const ARIA_KEY *key,
124 unsigned char *ivec, int *num)
125 {
126 CRYPTO_ofb128_encrypt(in, out, length, key, ivec, num,
127 (block128_f) ossl_aria_encrypt);
128 }
129
/*
 * Emit the ECB/CBC/CFB128/OFB128 EVP_CIPHER tables and their
 * EVP_aria_*() accessors for each key size.  Key lengths are 16/24/32
 * bytes; block and IV sizes are 16 bytes; 128 is the CFB shift width.
 * NOTE(review): exact argument meanings come from the
 * IMPLEMENT_BLOCK_CIPHER macro (not visible here) — confirm against
 * evp_local.h.
 */
IMPLEMENT_BLOCK_CIPHER(aria_128, ks, aria, EVP_ARIA_KEY,
                       NID_aria_128, 16, 16, 16, 128,
                       0, aria_init_key, NULL,
                       EVP_CIPHER_set_asn1_iv,
                       EVP_CIPHER_get_asn1_iv,
                       NULL)
IMPLEMENT_BLOCK_CIPHER(aria_192, ks, aria, EVP_ARIA_KEY,
                       NID_aria_192, 16, 24, 16, 128,
                       0, aria_init_key, NULL,
                       EVP_CIPHER_set_asn1_iv,
                       EVP_CIPHER_get_asn1_iv,
                       NULL)
IMPLEMENT_BLOCK_CIPHER(aria_256, ks, aria, EVP_ARIA_KEY,
                       NID_aria_256, 16, 32, 16, 128,
                       0, aria_init_key, NULL,
                       EVP_CIPHER_set_asn1_iv,
                       EVP_CIPHER_get_asn1_iv,
                       NULL)
148
/*
 * 1-bit and 8-bit CFB variants for each key size (16 is the block size,
 * 0 the extra flags).
 */
# define IMPLEMENT_ARIA_CFBR(ksize,cbits) \
                IMPLEMENT_CFBR(aria,aria,EVP_ARIA_KEY,ks,ksize,cbits,16,0)
IMPLEMENT_ARIA_CFBR(128,1)
IMPLEMENT_ARIA_CFBR(192,1)
IMPLEMENT_ARIA_CFBR(256,1)
IMPLEMENT_ARIA_CFBR(128,8)
IMPLEMENT_ARIA_CFBR(192,8)
IMPLEMENT_ARIA_CFBR(256,8)
157
/*
 * Define a generic (non-AEAD) ARIA EVP_CIPHER table plus its
 * EVP_aria_<keylen>_<mode>() accessor.  Used below for CTR mode.
 */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aria_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        EVP_ORIG_GLOBAL, \
        aria_init_key, \
        aria_##mode##_cipher, \
        NULL, \
        sizeof(EVP_ARIA_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aria_##keylen##_##mode(void) \
{ return &aria_##keylen##_##mode; }
170
171 static int aria_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
172 const unsigned char *in, size_t len)
173 {
174 unsigned int num = EVP_CIPHER_CTX_num(ctx);
175 EVP_ARIA_KEY *dat = EVP_C_DATA(EVP_ARIA_KEY, ctx);
176
177 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks, ctx->iv,
178 EVP_CIPHER_CTX_buf_noconst(ctx), &num,
179 (block128_f) ossl_aria_encrypt);
180 EVP_CIPHER_CTX_set_num(ctx, num);
181 return 1;
182 }
183
/* ARIA-CTR for 128/192/256-bit keys: stream cipher (block size 1), 16-byte counter */
BLOCK_CIPHER_generic(NID_aria, 128, 1, 16, ctr, ctr, CTR, 0)
BLOCK_CIPHER_generic(NID_aria, 192, 1, 16, ctr, ctr, CTR, 0)
BLOCK_CIPHER_generic(NID_aria, 256, 1, 16, ctr, ctr, CTR, 0)
187
188 /* Authenticated cipher modes (GCM/CCM) */
189
/*
 * Increment the big-endian 64-bit counter held in counter[0..7] by one,
 * wrapping to all-zero on overflow.
 */
static void ctr64_inc(unsigned char *counter)
{
    int i;

    for (i = 7; i >= 0; i--) {
        if (++counter[i] != 0)
            break;              /* no carry out of this byte: done */
    }
}
205
/*
 * Initialise the GCM context key schedule and/or IV.  Either |key| or
 * |iv| may be NULL, in which case the corresponding state is left
 * untouched (both NULL is a successful no-op).
 * Returns 1 on success, 0 on key-setup failure.
 */
static int aria_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                             const unsigned char *iv, int enc)
{
    int ret;
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX,ctx);

    if (!iv && !key)
        return 1;
    if (key) {
        /* GCM always drives the raw encrypt primitive, even on decrypt */
        ret = ossl_aria_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                        &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                           (block128_f) ossl_aria_encrypt);
        if (ret < 0) {
            ERR_raise(ERR_LIB_EVP,EVP_R_ARIA_KEY_SETUP_FAILED);
            return 0;
        }

        /*
         * If we have an iv can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        /* an explicitly supplied IV disables generated IVs */
        gctx->iv_gen = 0;
    }
    return 1;
}
245
/*
 * Control operations for ARIA-GCM contexts.  Returns 1 on success (or a
 * positive pad count for EVP_CTRL_AEAD_TLS1_AAD), 0 on failure and -1
 * for unrecognised ctrl codes.
 */
static int aria_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX,c);

    switch (type) {
    case EVP_CTRL_INIT:
        /* Reset all per-operation state; IV initially aliases ctx->iv */
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = EVP_CIPHER_iv_length(c->cipher);
        gctx->iv = c->iv;
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = gctx->ivlen;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        if (arg <= 0)
            return 0;
        /* Allocate memory for IV if needed */
        if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
            if (gctx->iv != c->iv)
                OPENSSL_free(gctx->iv);
            if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
                ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
                return 0;
            }
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        /* Only valid when decrypting; the tag is stashed in the scratch buffer */
        if (arg <= 0 || arg > 16 || EVP_CIPHER_CTX_encrypting(c))
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        /* Only valid after an encrypt finalise has computed the tag */
        if (arg <= 0 || arg > 16 || !EVP_CIPHER_CTX_encrypting(c)
            || gctx->taglen < 0)
            return 0;
        memcpy(ptr, EVP_CIPHER_CTX_buf_noconst(c), arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole IV */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;
        if (arg)
            memcpy(gctx->iv, ptr, arg);
        /* Randomise the invocation field for encryption */
        if (EVP_CIPHER_CTX_encrypting(c)
            && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;
        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        /* Requires a key and a previously set fixed IV */
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;
        /* Hand the caller the trailing |arg| bytes (the explicit part) */
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        /* Decrypt side: splice the received invocation field into the IV */
        if (gctx->iv_gen == 0 || gctx->key_set == 0
            || EVP_CIPHER_CTX_encrypting(c))
            return 0;
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        gctx->tls_aad_len = arg;
        {
            /* Last two AAD bytes are the big-endian record length */
            unsigned int len =
                EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!EVP_CIPHER_CTX_encrypting(c)) {
                if (len < EVP_GCM_TLS_TAG_LEN)
                    return 0;
                len -= EVP_GCM_TLS_TAG_LEN;
            }
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_ARIA_GCM_CTX *gctx_out = EVP_C_DATA(EVP_ARIA_GCM_CTX,out);
            /* Re-point the copied GCM key at the copied context */
            if (gctx->gcm.key) {
                if (gctx->gcm.key != &gctx->ks)
                    return 0;
                gctx_out->gcm.key = &gctx_out->ks;
            }
            /* Duplicate a heap-allocated IV; otherwise alias out->iv */
            if (gctx->iv == c->iv)
                gctx_out->iv = out->iv;
            else {
                if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
                    ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
                    return 0;
                }
                memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
            }
            return 1;
        }

    default:
        return -1;

    }
}
391
/*
 * Handle a complete TLS1 GCM record in one call: set or generate the
 * explicit IV, apply the saved AAD, encrypt/decrypt the payload in place
 * and create or verify the tag.  Returns the number of bytes written
 * (payload plus explicit IV and tag when encrypting) or -1 on error.
 * The IV and saved-AAD state are always invalidated on exit.
 */
static int aria_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                               const unsigned char *in, size_t len)
{
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX,ctx);
    int rv = -1;

    /* Encrypt/decrypt must be performed in place */
    if (out != in
        || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;
    /*
     * Set IV from start of buffer or generate IV and write to start of
     * buffer.
     */
    if (EVP_CIPHER_CTX_ctrl(ctx, EVP_CIPHER_CTX_encrypting(ctx) ?
                            EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;
    /* Use saved AAD */
    if (CRYPTO_gcm128_aad(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
                          gctx->tls_aad_len))
        goto err;
    /* Fix buffer and length to point to payload */
    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        /* Encrypt payload */
        if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, len))
            goto err;
        out += len;
        /* Finally write tag */
        CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        /* Decrypt */
        if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, len))
            goto err;
        /* Retrieve tag */
        CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
                          EVP_GCM_TLS_TAG_LEN);
        /* If tag mismatch wipe buffer: never expose bad plaintext */
        if (CRYPTO_memcmp(EVP_CIPHER_CTX_buf_noconst(ctx), in + len,
                          EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }

 err:
    /* Always invalidate state so a record can't be replayed */
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}
447
/*
 * Generic GCM update/final entry point (delegates to the TLS path when
 * AAD was saved via EVP_CTRL_AEAD_TLS1_AAD):
 *  - in != NULL, out == NULL: feed |in| as AAD
 *  - in != NULL, out != NULL: encrypt or decrypt payload
 *  - in == NULL: finalise; verifies the supplied tag when decrypting,
 *    computes and caches the tag when encrypting
 * Returns the number of bytes processed, 0 on successful finalise,
 * -1 on error.
 */
static int aria_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX,ctx);

    /* If not set up, return error */
    if (!gctx->key_set)
        return -1;

    if (gctx->tls_aad_len >= 0)
        return aria_gcm_tls_cipher(ctx, out, in, len);

    if (!gctx->iv_set)
        return -1;
    if (in) {
        if (out == NULL) {
            if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
                return -1;
        } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
            if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, len))
                return -1;
        } else {
            if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, len))
                return -1;
        }
        return len;
    }
    if (!EVP_CIPHER_CTX_encrypting(ctx)) {
        /* Finalising a decrypt: a tag must have been supplied via SET_TAG */
        if (gctx->taglen < 0)
            return -1;
        if (CRYPTO_gcm128_finish(&gctx->gcm,
                                 EVP_CIPHER_CTX_buf_noconst(ctx),
                                 gctx->taglen) != 0)
            return -1;
        gctx->iv_set = 0;
        return 0;
    }
    /* Finalising an encrypt: cache the full 16-byte tag for GET_TAG */
    CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx), 16);
    gctx->taglen = 16;
    /* Don't reuse the IV */
    gctx->iv_set = 0;
    return 0;
}
491
492 static int aria_gcm_cleanup(EVP_CIPHER_CTX *ctx)
493 {
494 EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX, ctx);
495
496 if (gctx->iv != ctx->iv)
497 OPENSSL_free(gctx->iv);
498
499 return 1;
500 }
501
/*
 * Initialise the CCM context key schedule and/or nonce.  Either |key| or
 * |iv| may be NULL; both NULL is a successful no-op.
 * Returns 1 on success, 0 on key-setup failure.
 */
static int aria_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                             const unsigned char *iv, int enc)
{
    int ret;
    EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX,ctx);

    if (!iv && !key)
        return 1;

    if (key) {
        /* CCM only ever uses the encryption schedule */
        ret = ossl_aria_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                        &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) ossl_aria_encrypt);
        if (ret < 0) {
            ERR_raise(ERR_LIB_EVP,EVP_R_ARIA_KEY_SETUP_FAILED);
            return 0;
        }
        /* No accelerated stream implementation for ARIA */
        cctx->str = NULL;
        cctx->key_set = 1;
    }
    if (iv) {
        /* Nonce length is 15 - L, per RFC 3610 */
        memcpy(ctx->iv, iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}
529
/*
 * Control operations for ARIA-CCM contexts.  Returns 1 on success (or
 * the tag length for EVP_CTRL_AEAD_TLS1_AAD), 0 on failure and -1 for
 * unrecognised ctrl codes.
 */
static int aria_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX,c);

    switch (type) {
    case EVP_CTRL_INIT:
        /* Defaults: L = 8 (7-byte nonce), M = 12 (12-byte tag) */
        cctx->key_set = 0;
        cctx->iv_set = 0;
        cctx->L = 8;
        cctx->M = 12;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        cctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        /* Nonce length is 15 - L, per RFC 3610 */
        *(int *)ptr = 15 - cctx->L;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        cctx->tls_aad_len = arg;
        {
            /* Last two AAD bytes are the big-endian record length */
            uint16_t len =
                EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!EVP_CIPHER_CTX_encrypting(c)) {
                if (len < cctx->M)
                    return 0;
                len -= cctx->M;
            }
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return cctx->M;

    case EVP_CTRL_CCM_SET_IV_FIXED:
        /* Sanity check length */
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
            return 0;
        /* Just copy to first part of IV */
        memcpy(c->iv, ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        /* Convert nonce length to the RFC 3610 L parameter */
        arg = 15 - arg;
        /* fall thru */
    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
            return 0;
        cctx->L = arg;
        return 1;
    case EVP_CTRL_AEAD_SET_TAG:
        /* Tag length must be an even value in [4,16] (RFC 3610) */
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;
        /* When encrypting only the length may be set, not the value */
        if (EVP_CIPHER_CTX_encrypting(c) && ptr)
            return 0;
        if (ptr) {
            cctx->tag_set = 1;
            memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        }
        cctx->M = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        /* Only valid after encryption has produced a tag */
        if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
            return 0;
        if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
            return 0;
        /* The tag may only be retrieved once per operation */
        cctx->tag_set = 0;
        cctx->iv_set = 0;
        cctx->len_set = 0;
        return 1;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_ARIA_CCM_CTX *cctx_out = EVP_C_DATA(EVP_ARIA_CCM_CTX,out);
            /* Re-point the copied CCM key at the copied context */
            if (cctx->ccm.key) {
                if (cctx->ccm.key != &cctx->ks)
                    return 0;
                cctx_out->ccm.key = &cctx_out->ks;
            }
            return 1;
        }

    default:
        return -1;
    }
}
629
/*
 * Handle a complete TLS1 CCM record in place: build the nonce from the
 * fixed IV plus the per-record explicit IV, apply the saved AAD, then
 * encrypt-and-tag or decrypt-and-verify.  Returns the number of bytes
 * written or -1 on any error.
 */
static int aria_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                               const unsigned char *in, size_t len)
{
    EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;

    /* Encrypt/decrypt must be performed in place */
    if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
        return -1;
    /* If encrypting set explicit IV from sequence number (start of AAD) */
    if (EVP_CIPHER_CTX_encrypting(ctx))
        memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
               EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Get rest of IV from explicit IV */
    memcpy(ctx->iv + EVP_CCM_TLS_FIXED_IV_LEN, in,
           EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Correct length value */
    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L,
                            len))
        return -1;
    /* Use saved AAD */
    CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
    /* Fix buffer to point to payload */
    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len, cctx->str)
            : CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        /* Append the tag after the ciphertext */
        if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
            return -1;
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    } else {
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len, cctx->str)
            : !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            /* Constant-time tag comparison before releasing plaintext */
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, in + len, cctx->M))
                    return len;
            }
        }
        /* Verification failed: never expose unauthenticated plaintext */
        OPENSSL_cleanse(out, len);
        return -1;
    }
}
676
/*
 * Generic CCM update/final entry point (delegates to the TLS path when
 * AAD was saved via EVP_CTRL_AEAD_TLS1_AAD):
 *  - in == NULL, out == NULL: set the total message length from |len|
 *  - in != NULL, out == NULL: feed |in| as AAD (length must be set first)
 *  - both non-NULL: encrypt or decrypt payload
 *  - in == NULL, out != NULL: EVP_*Final(), which returns no data
 * Returns bytes processed, 0 for the final call, or -1 on error.
 */
static int aria_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;

    /* If not set up, return error */
    if (!cctx->key_set)
        return -1;

    if (cctx->tls_aad_len >= 0)
        return aria_ccm_tls_cipher(ctx, out, in, len);

    /* EVP_*Final() doesn't return any data */
    if (in == NULL && out != NULL)
        return 0;

    if (!cctx->iv_set)
        return -1;

    if (!out) {
        if (!in) {
            /* No in/out: this call declares the total plaintext length */
            if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
                return -1;
            cctx->len_set = 1;
            return len;
        }
        /* If have AAD need message length */
        if (!cctx->len_set && len)
            return -1;
        CRYPTO_ccm128_aad(ccm, in, len);
        return len;
    }

    /* The tag must be set before actually decrypting data */
    if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
        return -1;

    /* If not set length yet do it */
    if (!cctx->len_set) {
        if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
            return -1;
        cctx->len_set = 1;
    }
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len, cctx->str)
            : CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        cctx->tag_set = 1;
        return len;
    } else {
        int rv = -1;
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                /* Compare against the tag stashed by EVP_CTRL_AEAD_SET_TAG */
                if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
                                   cctx->M))
                    rv = len;
            }
        }
        /* On failure wipe the unauthenticated plaintext */
        if (rv == -1)
            OPENSSL_cleanse(out, len);
        /* State must be re-established before the next operation */
        cctx->iv_set = 0;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        return rv;
    }
}
747
/* CCM contexts hold no heap-allocated state, so no cleanup handler is needed */
#define aria_ccm_cleanup NULL

/* Flags shared by the GCM and CCM AEAD ciphers */
#define ARIA_AUTH_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
                         | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                         | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                         | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_FLAG_AEAD_CIPHER \
                         | EVP_CIPH_CUSTOM_IV_LENGTH)

/*
 * Define an AEAD ARIA EVP_CIPHER table plus its
 * EVP_aria_<keylen>_<mode>() accessor; MODE selects the context struct
 * (EVP_ARIA_GCM_CTX / EVP_ARIA_CCM_CTX) and the aria_<mode>_* handlers.
 */
#define BLOCK_CIPHER_aead(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aria_##keylen##_##mode = { \
        nid##_##keylen##_##nmode, \
        blocksize, keylen/8, ivlen, \
        ARIA_AUTH_FLAGS|EVP_CIPH_##MODE##_MODE, \
        EVP_ORIG_GLOBAL, \
        aria_##mode##_init_key, \
        aria_##mode##_cipher, \
        aria_##mode##_cleanup, \
        sizeof(EVP_ARIA_##MODE##_CTX), \
        NULL,NULL,aria_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aria_##keylen##_##mode(void) \
{ return (EVP_CIPHER*)&aria_##keylen##_##mode; }

/* ARIA-GCM and ARIA-CCM for each key size (stream-like block size 1, 12-byte default IV) */
BLOCK_CIPHER_aead(NID_aria, 128, 1, 12, gcm, gcm, GCM, 0)
BLOCK_CIPHER_aead(NID_aria, 192, 1, 12, gcm, gcm, GCM, 0)
BLOCK_CIPHER_aead(NID_aria, 256, 1, 12, gcm, gcm, GCM, 0)

BLOCK_CIPHER_aead(NID_aria, 128, 1, 12, ccm, ccm, CCM, 0)
BLOCK_CIPHER_aead(NID_aria, 192, 1, 12, ccm, ccm, CCM, 0)
BLOCK_CIPHER_aead(NID_aria, 256, 1, 12, ccm, ccm, CCM, 0)
777
778 #endif