/*
 * Copyright 2017-2019 The OpenSSL Project Authors. All Rights Reserved.
 * Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved.
 *
 * Licensed under the Apache License 2.0 (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */
10
#include "internal/cryptlib.h"
#ifndef OPENSSL_NO_ARIA
# include <openssl/evp.h>
# include <openssl/modes.h>
# include <openssl/rand.h>
# include <openssl/rand_drbg.h>
# include "internal/aria.h"
# include "internal/evp_int.h"
# include "modes_lcl.h"
# include "evp_locl.h"

/* ARIA subkey Structure */
typedef struct {
    ARIA_KEY ks;                /* expanded ARIA key schedule */
} EVP_ARIA_KEY;
26
bc326738
JS
/* ARIA GCM context */
typedef struct {
    union {
        OSSL_UNION_ALIGN;       /* ensures correct alignment for the schedule */
        ARIA_KEY ks;
    } ks;                       /* ARIA subkey to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    GCM128_CONTEXT gcm;         /* low-level GCM state */
    unsigned char *iv;          /* Temporary IV store */
    int ivlen;                  /* IV length */
    int taglen;                 /* tag length, or -1 if not yet set */
    int iv_gen;                 /* It is OK to generate IVs */
    int tls_aad_len;            /* TLS AAD length, or -1 outside TLS mode */
} EVP_ARIA_GCM_CTX;
42
/* ARIA CCM context */
typedef struct {
    union {
        OSSL_UNION_ALIGN;       /* ensures correct alignment for the schedule */
        ARIA_KEY ks;
    } ks;                       /* ARIA key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    int tag_set;                /* Set if tag is valid */
    int len_set;                /* Set if message length set */
    int L, M;                   /* L and M parameters from RFC3610 */
    int tls_aad_len;            /* TLS AAD length, or -1 outside TLS mode */
    CCM128_CONTEXT ccm;         /* low-level CCM state */
    ccm128_f str;               /* optional stream function (NULL here) */
} EVP_ARIA_CCM_CTX;
58
d42d0a4d
P
59/* The subkey for ARIA is generated. */
60static int aria_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
61 const unsigned char *iv, int enc)
62{
63 int ret;
64 int mode = EVP_CIPHER_CTX_mode(ctx);
65
66 if (enc || (mode != EVP_CIPH_ECB_MODE && mode != EVP_CIPH_CBC_MODE))
67 ret = aria_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
68 EVP_CIPHER_CTX_get_cipher_data(ctx));
69 else
70 ret = aria_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
71 EVP_CIPHER_CTX_get_cipher_data(ctx));
72 if (ret < 0) {
73 EVPerr(EVP_F_ARIA_INIT_KEY,EVP_R_ARIA_KEY_SETUP_FAILED);
74 return 0;
75 }
76 return 1;
77}
78
79static void aria_cbc_encrypt(const unsigned char *in, unsigned char *out,
80 size_t len, const ARIA_KEY *key,
81 unsigned char *ivec, const int enc)
82{
83
84 if (enc)
85 CRYPTO_cbc128_encrypt(in, out, len, key, ivec,
86 (block128_f) aria_encrypt);
87 else
88 CRYPTO_cbc128_decrypt(in, out, len, key, ivec,
89 (block128_f) aria_encrypt);
90}
91
/* CFB-128 mode via the generic 128-bit CFB implementation. */
static void aria_cfb128_encrypt(const unsigned char *in, unsigned char *out,
                                size_t length, const ARIA_KEY *key,
                                unsigned char *ivec, int *num, const int enc)
{

    CRYPTO_cfb128_encrypt(in, out, length, key, ivec, num, enc,
                          (block128_f) aria_encrypt);
}
100
/* CFB-1 mode (1-bit feedback) via the generic implementation. */
static void aria_cfb1_encrypt(const unsigned char *in, unsigned char *out,
                              size_t length, const ARIA_KEY *key,
                              unsigned char *ivec, int *num, const int enc)
{
    CRYPTO_cfb128_1_encrypt(in, out, length, key, ivec, num, enc,
                            (block128_f) aria_encrypt);
}
108
/* CFB-8 mode (8-bit feedback) via the generic implementation. */
static void aria_cfb8_encrypt(const unsigned char *in, unsigned char *out,
                              size_t length, const ARIA_KEY *key,
                              unsigned char *ivec, int *num, const int enc)
{
    CRYPTO_cfb128_8_encrypt(in, out, length, key, ivec, num, enc,
                            (block128_f) aria_encrypt);
}
116
/*
 * ECB mode: process a single 16-byte block.  The direction appears to be
 * baked into the key schedule, so enc is unused here — NOTE(review):
 * confirm against aria_init_key, which selects the schedule by direction.
 */
static void aria_ecb_encrypt(const unsigned char *in, unsigned char *out,
                             const ARIA_KEY *key, const int enc)
{
    aria_encrypt(in, out, key);
}
122
/* OFB-128 mode via the generic implementation. */
static void aria_ofb128_encrypt(const unsigned char *in, unsigned char *out,
                                size_t length, const ARIA_KEY *key,
                                unsigned char *ivec, int *num)
{
    CRYPTO_ofb128_encrypt(in, out, length, key, ivec, num,
                          (block128_f) aria_encrypt);
}
130
/*
 * ECB/CBC/CFB128/OFB EVP_CIPHERs and accessors for ARIA-128/192/256:
 * block size 16, IV length 16, CFB shift 128 bits.
 */
IMPLEMENT_BLOCK_CIPHER(aria_128, ks, aria, EVP_ARIA_KEY,
                       NID_aria_128, 16, 16, 16, 128,
                       0, aria_init_key, NULL,
                       EVP_CIPHER_set_asn1_iv,
                       EVP_CIPHER_get_asn1_iv,
                       NULL)
IMPLEMENT_BLOCK_CIPHER(aria_192, ks, aria, EVP_ARIA_KEY,
                       NID_aria_192, 16, 24, 16, 128,
                       0, aria_init_key, NULL,
                       EVP_CIPHER_set_asn1_iv,
                       EVP_CIPHER_get_asn1_iv,
                       NULL)
IMPLEMENT_BLOCK_CIPHER(aria_256, ks, aria, EVP_ARIA_KEY,
                       NID_aria_256, 16, 32, 16, 128,
                       0, aria_init_key, NULL,
                       EVP_CIPHER_set_asn1_iv,
                       EVP_CIPHER_get_asn1_iv,
                       NULL)

/* CFB1 and CFB8 variants for each key size */
# define IMPLEMENT_ARIA_CFBR(ksize,cbits) \
                IMPLEMENT_CFBR(aria,aria,EVP_ARIA_KEY,ks,ksize,cbits,16,0)
IMPLEMENT_ARIA_CFBR(128,1)
IMPLEMENT_ARIA_CFBR(192,1)
IMPLEMENT_ARIA_CFBR(256,1)
IMPLEMENT_ARIA_CFBR(128,8)
IMPLEMENT_ARIA_CFBR(192,8)
IMPLEMENT_ARIA_CFBR(256,8)
158
/*
 * Define a plain (non-AEAD) EVP_CIPHER plus its EVP_aria_<keylen>_<mode>()
 * accessor, for modes not covered by IMPLEMENT_BLOCK_CIPHER (e.g. CTR).
 */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aria_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aria_init_key, \
        aria_##mode##_cipher, \
        NULL, \
        sizeof(EVP_ARIA_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aria_##keylen##_##mode(void) \
{ return &aria_##keylen##_##mode; }
170
171static int aria_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
172 const unsigned char *in, size_t len)
173{
174 unsigned int num = EVP_CIPHER_CTX_num(ctx);
175 EVP_ARIA_KEY *dat = EVP_C_DATA(EVP_ARIA_KEY,ctx);
176
177 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
178 EVP_CIPHER_CTX_iv_noconst(ctx),
179 EVP_CIPHER_CTX_buf_noconst(ctx), &num,
180 (block128_f) aria_encrypt);
181 EVP_CIPHER_CTX_set_num(ctx, num);
182 return 1;
183}
184
/* CTR mode for each key size (stream-style: block size 1) */
BLOCK_CIPHER_generic(NID_aria, 128, 1, 16, ctr, ctr, CTR, 0)
BLOCK_CIPHER_generic(NID_aria, 192, 1, 16, ctr, ctr, CTR, 0)
BLOCK_CIPHER_generic(NID_aria, 256, 1, 16, ctr, ctr, CTR, 0)
188
bc326738
JS
/* Authenticated cipher modes (GCM/CCM) */

/*
 * Increment an 8-byte big-endian counter by one, propagating the carry
 * from the least-significant byte and stopping as soon as it clears.
 */
static void ctr64_inc(unsigned char *counter)
{
    int i;

    for (i = 7; i >= 0; i--) {
        if (++counter[i] != 0)
            return;
    }
}
206
207static int aria_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
208 const unsigned char *iv, int enc)
209{
210 int ret;
211 EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX,ctx);
212
213 if (!iv && !key)
214 return 1;
215 if (key) {
216 ret = aria_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
217 &gctx->ks.ks);
218 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
219 (block128_f) aria_encrypt);
220 if (ret < 0) {
221 EVPerr(EVP_F_ARIA_GCM_INIT_KEY,EVP_R_ARIA_KEY_SETUP_FAILED);
222 return 0;
223 }
224
225 /*
226 * If we have an iv can set it directly, otherwise use saved IV.
227 */
228 if (iv == NULL && gctx->iv_set)
229 iv = gctx->iv;
230 if (iv) {
231 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
232 gctx->iv_set = 1;
233 }
234 gctx->key_set = 1;
235 } else {
236 /* If key set use IV, otherwise copy */
237 if (gctx->key_set)
238 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
239 else
240 memcpy(gctx->iv, iv, gctx->ivlen);
241 gctx->iv_set = 1;
242 gctx->iv_gen = 0;
243 }
244 return 1;
245}
246
/*
 * Control operations for ARIA-GCM contexts.  Returns 1 on success, 0 on
 * failure and -1 for unsupported controls; EVP_CTRL_AEAD_TLS1_AAD instead
 * returns the number of tag bytes appended to a TLS record.
 */
static int aria_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX,c);

    switch (type) {
    case EVP_CTRL_INIT:
        /* Reset all state and point the IV at the ctx's built-in buffer */
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = EVP_CIPHER_CTX_iv_length(c);
        gctx->iv = EVP_CIPHER_CTX_iv_noconst(c);
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        if (arg <= 0)
            return 0;
        /* Allocate memory for IV if needed */
        if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
            if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
                OPENSSL_free(gctx->iv);
            if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
                EVPerr(EVP_F_ARIA_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                return 0;
            }
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        /* An expected tag may only be supplied when decrypting */
        if (arg <= 0 || arg > 16 || EVP_CIPHER_CTX_encrypting(c))
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        /* Tag is only available after encryption has finalised */
        if (arg <= 0 || arg > 16 || !EVP_CIPHER_CTX_encrypting(c)
            || gctx->taglen < 0)
            return 0;
        memcpy(ptr, EVP_CIPHER_CTX_buf_noconst(c), arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole IV */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;
        if (arg)
            memcpy(gctx->iv, ptr, arg);
        /* When encrypting, randomise the invocation field */
        if (EVP_CIPHER_CTX_encrypting(c)
            && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;
        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        /* Decrypt-only: install the peer's explicit IV */
        if (gctx->iv_gen == 0 || gctx->key_set == 0
            || EVP_CIPHER_CTX_encrypting(c))
            return 0;
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        gctx->tls_aad_len = arg;
        {
            /* Last two AAD bytes hold the record length (big-endian) */
            unsigned int len =
                EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!EVP_CIPHER_CTX_encrypting(c)) {
                if (len < EVP_GCM_TLS_TAG_LEN)
                    return 0;
                len -= EVP_GCM_TLS_TAG_LEN;
            }
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_ARIA_GCM_CTX *gctx_out = EVP_C_DATA(EVP_ARIA_GCM_CTX,out);
            /* Repoint internal key pointer at the copy's own schedule */
            if (gctx->gcm.key) {
                if (gctx->gcm.key != &gctx->ks)
                    return 0;
                gctx_out->gcm.key = &gctx_out->ks;
            }
            /* Duplicate a heap IV; otherwise use the copy's built-in buffer */
            if (gctx->iv == EVP_CIPHER_CTX_iv_noconst(c))
                gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
            else {
                if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
                    EVPerr(EVP_F_ARIA_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                    return 0;
                }
                memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
            }
            return 1;
        }

    default:
        return -1;

    }
}
388
/*
 * TLS record path: in-place encrypt/decrypt with the explicit IV at the
 * start of the buffer and the tag at the end.  Returns the number of
 * bytes written on success, -1 on failure.
 */
static int aria_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                               const unsigned char *in, size_t len)
{
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX,ctx);
    int rv = -1;

    /* Encrypt/decrypt must be performed in place */
    if (out != in
        || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;
    /*
     * Set IV from start of buffer or generate IV and write to start of
     * buffer.
     */
    if (EVP_CIPHER_CTX_ctrl(ctx, EVP_CIPHER_CTX_encrypting(ctx) ?
                            EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;
    /* Use saved AAD */
    if (CRYPTO_gcm128_aad(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
                          gctx->tls_aad_len))
        goto err;
    /* Fix buffer and length to point to payload */
    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        /* Encrypt payload */
        if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, len))
            goto err;
        out += len;
        /* Finally write tag */
        CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        /* Decrypt */
        if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, len))
            goto err;
        /* Retrieve tag */
        CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
                          EVP_GCM_TLS_TAG_LEN);
        /* If tag mismatch wipe buffer */
        if (CRYPTO_memcmp(EVP_CIPHER_CTX_buf_noconst(ctx), in + len,
                          EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }

 err:
    /* IVs are single-use: force a fresh IV before the next record */
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}
444
/*
 * Generic (non-TLS) GCM entry point.  With in != NULL: absorb AAD when
 * out == NULL, otherwise encrypt/decrypt the payload.  With in == NULL:
 * finalise — verify the supplied tag when decrypting, or generate the
 * tag when encrypting.  Returns bytes processed, 0 on finalise, or -1.
 */
static int aria_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX,ctx);

    /* If not set up, return error */
    if (!gctx->key_set)
        return -1;

    if (gctx->tls_aad_len >= 0)
        return aria_gcm_tls_cipher(ctx, out, in, len);

    if (!gctx->iv_set)
        return -1;
    if (in) {
        if (out == NULL) {
            if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
                return -1;
        } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
            if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, len))
                return -1;
        } else {
            if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, len))
                return -1;
        }
        return len;
    }
    if (!EVP_CIPHER_CTX_encrypting(ctx)) {
        /* Finalise decryption: check the tag the caller supplied earlier */
        if (gctx->taglen < 0)
            return -1;
        if (CRYPTO_gcm128_finish(&gctx->gcm,
                                 EVP_CIPHER_CTX_buf_noconst(ctx),
                                 gctx->taglen) != 0)
            return -1;
        gctx->iv_set = 0;
        return 0;
    }
    /* Finalise encryption: stash the 16-byte tag for AEAD_GET_TAG */
    CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx), 16);
    gctx->taglen = 16;
    /* Don't reuse the IV */
    gctx->iv_set = 0;
    return 0;
}
488
183f52e2
MC
489static int aria_gcm_cleanup(EVP_CIPHER_CTX *ctx)
490{
491 EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX, ctx);
492
493 if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(ctx))
494 OPENSSL_free(gctx->iv);
495
496 return 1;
497}
498
bc326738
JS
499static int aria_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
500 const unsigned char *iv, int enc)
501{
502 int ret;
503 EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX,ctx);
504
505 if (!iv && !key)
506 return 1;
507
508 if (key) {
509 ret = aria_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
510 &cctx->ks.ks);
511 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
512 &cctx->ks, (block128_f) aria_encrypt);
513 if (ret < 0) {
514 EVPerr(EVP_F_ARIA_CCM_INIT_KEY,EVP_R_ARIA_KEY_SETUP_FAILED);
515 return 0;
516 }
517 cctx->str = NULL;
518 cctx->key_set = 1;
519 }
520 if (iv) {
521 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
522 cctx->iv_set = 1;
523 }
524 return 1;
525}
526
/*
 * Control operations for ARIA-CCM contexts.  Returns 1 on success, 0 on
 * failure and -1 for unsupported controls; EVP_CTRL_AEAD_TLS1_AAD instead
 * returns the tag length M appended to a TLS record.
 */
static int aria_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX,c);

    switch (type) {
    case EVP_CTRL_INIT:
        cctx->key_set = 0;
        cctx->iv_set = 0;
        /* Defaults: 8-octet length field (L), 12-octet tag (M) */
        cctx->L = 8;
        cctx->M = 12;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        cctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        cctx->tls_aad_len = arg;
        {
            /* Last two AAD bytes hold the record length (big-endian) */
            uint16_t len =
                EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!EVP_CIPHER_CTX_encrypting(c)) {
                if (len < cctx->M)
                    return 0;
                len -= cctx->M;
            }
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return cctx->M;

    case EVP_CTRL_CCM_SET_IV_FIXED:
        /* Sanity check length */
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
            return 0;
        /* Just copy to first part of IV */
        memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        /* Nonce length and L are linked: nonce = 15 - L octets */
        arg = 15 - arg;
        /* fall thru */
    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
            return 0;
        cctx->L = arg;
        return 1;
    case EVP_CTRL_AEAD_SET_TAG:
        /* Tag length must be an even value in 4..16 (RFC 3610) */
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;
        /* An expected tag value may only be supplied when decrypting */
        if (EVP_CIPHER_CTX_encrypting(c) && ptr)
            return 0;
        if (ptr) {
            cctx->tag_set = 1;
            memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        }
        cctx->M = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
            return 0;
        if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
            return 0;
        /* Retrieving the tag resets the context for the next message */
        cctx->tag_set = 0;
        cctx->iv_set = 0;
        cctx->len_set = 0;
        return 1;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_ARIA_CCM_CTX *cctx_out = EVP_C_DATA(EVP_ARIA_CCM_CTX,out);
            /* Repoint internal key pointer at the copy's own schedule */
            if (cctx->ccm.key) {
                if (cctx->ccm.key != &cctx->ks)
                    return 0;
                cctx_out->ccm.key = &cctx_out->ks;
            }
            return 1;
        }

    default:
        return -1;
    }
}
622
/*
 * TLS record path for CCM: in-place encrypt/decrypt with the explicit IV
 * at the record start and the M-octet tag at the end.  Returns bytes
 * written on success, -1 on failure.
 */
static int aria_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                               const unsigned char *in, size_t len)
{
    EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;

    /* Encrypt/decrypt must be performed in place */
    if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
        return -1;
    /* If encrypting set explicit IV from sequence number (start of AAD) */
    if (EVP_CIPHER_CTX_encrypting(ctx))
        memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
               EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Get rest of IV from explicit IV */
    memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in,
           EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Correct length value */
    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L,
                            len))
        return -1;
    /* Use saved AAD */
    CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
    /* Fix buffer to point to payload */
    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len, cctx->str)
            : CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
            return -1;
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    } else {
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len, cctx->str)
            : !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            /* Constant-time tag check; on mismatch the plaintext is wiped */
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, in + len, cctx->M))
                    return len;
            }
        }
        OPENSSL_cleanse(out, len);
        return -1;
    }
}
669
/*
 * Generic (non-TLS) CCM entry point.  CCM needs the total message length
 * up front: a call with in == out == NULL supplies it.  out == NULL with
 * in != NULL absorbs AAD; otherwise the payload is encrypted/decrypted.
 * Returns bytes processed, 0 on finalise, or -1 on error.
 */
static int aria_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;

    /* If not set up, return error */
    if (!cctx->key_set)
        return -1;

    if (cctx->tls_aad_len >= 0)
        return aria_ccm_tls_cipher(ctx, out, in, len);

    /* EVP_*Final() doesn't return any data */
    if (in == NULL && out != NULL)
        return 0;

    if (!cctx->iv_set)
        return -1;

    /* Decryption cannot proceed without an expected tag */
    if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
        return -1;
    if (!out) {
        if (!in) {
            /* in == out == NULL: record the total plaintext length */
            if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
                                    15 - cctx->L, len))
                return -1;
            cctx->len_set = 1;
            return len;
        }
        /* If have AAD need message length */
        if (!cctx->len_set && len)
            return -1;
        CRYPTO_ccm128_aad(ccm, in, len);
        return len;
    }
    /* If not set length yet do it */
    if (!cctx->len_set) {
        if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
                                15 - cctx->L, len))
            return -1;
        cctx->len_set = 1;
    }
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len, cctx->str)
            : CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        cctx->tag_set = 1;
        return len;
    } else {
        int rv = -1;
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                /* Constant-time compare against the caller-supplied tag */
                if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
                                   cctx->M))
                    rv = len;
            }
        }
        /* Wipe plaintext on failure and reset for the next message */
        if (rv == -1)
            OPENSSL_cleanse(out, len);
        cctx->iv_set = 0;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        return rv;
    }
}
739
183f52e2
MC
/* CCM contexts hold no heap state, so no cleanup function is needed */
#define aria_ccm_cleanup NULL

#define ARIA_AUTH_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
                         | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                         | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                         | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_FLAG_AEAD_CIPHER)

/* Define an AEAD (GCM/CCM) EVP_CIPHER and its accessor function */
#define BLOCK_CIPHER_aead(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aria_##keylen##_##mode = { \
        nid##_##keylen##_##nmode, \
        blocksize, keylen/8, ivlen, \
        ARIA_AUTH_FLAGS|EVP_CIPH_##MODE##_MODE, \
        aria_##mode##_init_key, \
        aria_##mode##_cipher, \
        aria_##mode##_cleanup, \
        sizeof(EVP_ARIA_##MODE##_CTX), \
        NULL,NULL,aria_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aria_##keylen##_##mode(void) \
{ return (EVP_CIPHER*)&aria_##keylen##_##mode; }

BLOCK_CIPHER_aead(NID_aria, 128, 1, 12, gcm, gcm, GCM, 0)
BLOCK_CIPHER_aead(NID_aria, 192, 1, 12, gcm, gcm, GCM, 0)
BLOCK_CIPHER_aead(NID_aria, 256, 1, 12, gcm, gcm, GCM, 0)

BLOCK_CIPHER_aead(NID_aria, 128, 1, 12, ccm, ccm, CCM, 0)
BLOCK_CIPHER_aead(NID_aria, 192, 1, 12, ccm, ccm, CCM, 0)
BLOCK_CIPHER_aead(NID_aria, 256, 1, 12, ccm, ccm, CCM, 0)
767
d42d0a4d 768#endif