// SPDX-License-Identifier: GPL-2.0
/* Copyright (C) 2012-2019 ARM Limited (or its affiliates). */

#include <linux/kernel.h>
#include <linux/module.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <crypto/internal/des.h>
#include <crypto/xts.h>
#include <crypto/sm4.h>
#include <crypto/scatterwalk.h>

#include "cc_driver.h"
#include "cc_lli_defs.h"
#include "cc_buffer_mgr.h"
#include "cc_cipher.h"
#include "cc_request_mgr.h"

#define MAX_SKCIPHER_SEQ_LEN 6

#define template_skcipher template_u.skcipher

struct cc_user_key_info {
	u8 *key;
	dma_addr_t key_dma_addr;
};

struct cc_hw_key_info {
	enum cc_hw_crypto_key key1_slot;
	enum cc_hw_crypto_key key2_slot;
};

struct cc_cpp_key_info {
	u8 slot;
	enum cc_cpp_alg alg;
};

enum cc_key_type {
	CC_UNPROTECTED_KEY,		/* User key */
	CC_HW_PROTECTED_KEY,		/* HW (FDE) key */
	CC_POLICY_PROTECTED_KEY,	/* CPP key */
	CC_INVALID_PROTECTED_KEY	/* Invalid key */
};

struct cc_cipher_ctx {
	struct cc_drvdata *drvdata;
	int keylen;
	int key_round_number;
	int cipher_mode;
	int flow_mode;
	unsigned int flags;
	enum cc_key_type key_type;
	struct cc_user_key_info user;
	union {
		struct cc_hw_key_info hw;
		struct cc_cpp_key_info cpp;
	};
	struct crypto_shash *shash_tfm;
};

static void cc_cipher_complete(struct device *dev, void *cc_req, int err);

static inline enum cc_key_type cc_key_type(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);

	return ctx_p->key_type;
}

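/*
 * Check that a key size is legal for the configured flow/cipher mode.
 * XTS, ESSIV and BitLocker operate on a key pair, so they reject the
 * single 128/192-bit AES sizes and are the only users of the
 * double-length (2 x 192/256-bit) sizes.
 */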
static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (size) {
		case CC_AES_128_BIT_KEY_SIZE:
		case CC_AES_192_BIT_KEY_SIZE:
			if (ctx_p->cipher_mode != DRV_CIPHER_XTS &&
			    ctx_p->cipher_mode != DRV_CIPHER_ESSIV &&
			    ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER)
				return 0;
			break;
		case CC_AES_256_BIT_KEY_SIZE:
			return 0;
		case (CC_AES_192_BIT_KEY_SIZE * 2):
		case (CC_AES_256_BIT_KEY_SIZE * 2):
			if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
			    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
			    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE)
			return 0;
		break;
	case S_DIN_to_SM4:
		if (size == SM4_KEY_SIZE)
			return 0;
	default:
		break;
	}
	return -EINVAL;
}

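/*
 * Check the request length against the cipher mode: CTS and XTS need at
 * least one AES block, ECB/CBC/ESSIV/BitLocker need block-aligned input,
 * and the stream-like CTR/OFB modes take any length.
 */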
static int validate_data_size(struct cc_cipher_ctx *ctx_p,
			      unsigned int size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (ctx_p->cipher_mode) {
		case DRV_CIPHER_XTS:
		case DRV_CIPHER_CBC_CTS:
			if (size >= AES_BLOCK_SIZE)
				return 0;
			break;
		case DRV_CIPHER_OFB:
		case DRV_CIPHER_CTR:
			return 0;
		case DRV_CIPHER_ECB:
		case DRV_CIPHER_CBC:
		case DRV_CIPHER_ESSIV:
		case DRV_CIPHER_BITLOCKER:
			if (IS_ALIGNED(size, AES_BLOCK_SIZE))
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (IS_ALIGNED(size, DES_BLOCK_SIZE))
			return 0;
		break;
	case S_DIN_to_SM4:
		switch (ctx_p->cipher_mode) {
		case DRV_CIPHER_CTR:
			return 0;
		case DRV_CIPHER_ECB:
		case DRV_CIPHER_CBC:
			if (IS_ALIGNED(size, SM4_BLOCK_SIZE))
				return 0;
		default:
			break;
		}
	default:
		break;
	}
	return -EINVAL;
}

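/*
 * Per-tfm init: allocate and DMA-map a key buffer sized for the
 * algorithm's maximum key, and, for ESSIV, allocate the software sha256
 * transform used to derive the tweak key in cc_cipher_setkey().
 */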
static int cc_cipher_init(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct cc_crypto_alg *cc_alg =
		container_of(tfm->__crt_alg, struct cc_crypto_alg,
			     skcipher_alg.base);
	struct device *dev = drvdata_to_dev(cc_alg->drvdata);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
	int rc = 0;

	dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p,
		crypto_tfm_alg_name(tfm));

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct cipher_req_ctx));

	ctx_p->cipher_mode = cc_alg->cipher_mode;
	ctx_p->flow_mode = cc_alg->flow_mode;
	ctx_p->drvdata = cc_alg->drvdata;

	/* Allocate key buffer, cache line aligned */
	ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL);
	if (!ctx_p->user.key)
		return -ENOMEM;

	dev_dbg(dev, "Allocated key buffer in context. key=@%p\n",
		ctx_p->user.key);

	/* Map key buffer */
	ctx_p->user.key_dma_addr = dma_map_single(dev, ctx_p->user.key,
						  max_key_buf_size,
						  DMA_TO_DEVICE);
	if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
		dev_err(dev, "Mapping Key %u B at va=%pK for DMA failed\n",
			max_key_buf_size, ctx_p->user.key);
		return -ENOMEM;
	}
	dev_dbg(dev, "Mapped key %u B at va=%pK to dma=%pad\n",
		max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr);

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* Alloc hash tfm for essiv */
		ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0);
		if (IS_ERR(ctx_p->shash_tfm)) {
			dev_err(dev, "Error allocating hash tfm for ESSIV.\n");
			return PTR_ERR(ctx_p->shash_tfm);
		}
	}

	return rc;
}

static void cc_cipher_exit(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct cc_crypto_alg *cc_alg =
		container_of(alg, struct cc_crypto_alg,
			     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);

	dev_dbg(dev, "Clearing context @%p for %s\n",
		crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* Free hash tfm for essiv */
		crypto_free_shash(ctx_p->shash_tfm);
		ctx_p->shash_tfm = NULL;
	}

	/* Unmap key buffer */
	dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
			 DMA_TO_DEVICE);
	dev_dbg(dev, "Unmapped key buffer key_dma_addr=%pad\n",
		&ctx_p->user.key_dma_addr);

	/* Free key buffer in context */
	kzfree(ctx_p->user.key);
	dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key);
}

struct tdes_keys {
	u8 key1[DES_KEY_SIZE];
	u8 key2[DES_KEY_SIZE];
	u8 key3[DES_KEY_SIZE];
};

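/*
 * Map a protected key slot number to the matching HW key token. FDE
 * (HW) slots 0-3 become KFDE0_KEY..KFDE3_KEY; CPP slots are rebased to
 * a zero-based index within the CPP range.
 */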
static enum cc_hw_crypto_key cc_slot_to_hw_key(u8 slot_num)
{
	switch (slot_num) {
	case 0:
		return KFDE0_KEY;
	case 1:
		return KFDE1_KEY;
	case 2:
		return KFDE2_KEY;
	case 3:
		return KFDE3_KEY;
	}
	return END_OF_KEYS;
}

static u8 cc_slot_to_cpp_key(u8 slot_num)
{
	return (slot_num - CC_FIRST_CPP_KEY_SLOT);
}

static inline enum cc_key_type cc_slot_to_key_type(u8 slot_num)
{
	if (slot_num >= CC_FIRST_HW_KEY_SLOT && slot_num <= CC_LAST_HW_KEY_SLOT)
		return CC_HW_PROTECTED_KEY;
	else if (slot_num >= CC_FIRST_CPP_KEY_SLOT &&
		 slot_num <= CC_LAST_CPP_KEY_SLOT)
		return CC_POLICY_PROTECTED_KEY;
	else
		return CC_INVALID_PROTECTED_KEY;
}

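/*
 * .setkey handler for the protected-key ("paes"/"psm4") templates. The
 * key buffer does not carry key material: it is a struct cc_hkey_info
 * token (see cc_driver.h) whose hw_key1/hw_key2 fields name on-chip key
 * slots and whose keylen field gives the size of the key they reference.
 * Illustrative sketch only (the slot numbers are an assumption; any
 * valid pair of distinct HW slots works for XTS):
 *
 *	struct cc_hkey_info hki = { .keylen = AES_KEYSIZE_256,
 *				    .hw_key1 = 0, .hw_key2 = 1 };
 *	crypto_skcipher_setkey(tfm, (u8 *)&hki, sizeof(hki));
 */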
static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hkey_info hki;

	dev_dbg(dev, "Setting HW key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	/* This checks the size of the protected key token */
	if (keylen != sizeof(hki)) {
		dev_err(dev, "Unsupported protected key size %d.\n", keylen);
		return -EINVAL;
	}

	memcpy(&hki, key, keylen);

	/* The real key len for crypto op is the size of the HW key
	 * referenced by the HW key slot, not the hardware key token
	 */
	keylen = hki.keylen;

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_dbg(dev, "Unsupported key size %d.\n", keylen);
		return -EINVAL;
	}

	ctx_p->keylen = keylen;

	switch (cc_slot_to_key_type(hki.hw_key1)) {
	case CC_HW_PROTECTED_KEY:
		if (ctx_p->flow_mode == S_DIN_to_SM4) {
			dev_err(dev, "Only AES HW protected keys are supported\n");
			return -EINVAL;
		}

		ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1);
		if (ctx_p->hw.key1_slot == END_OF_KEYS) {
			dev_err(dev, "Unsupported hw key1 number (%d)\n",
				hki.hw_key1);
			return -EINVAL;
		}

		if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
		    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
		    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) {
			if (hki.hw_key1 == hki.hw_key2) {
				dev_err(dev, "Illegal hw key numbers (%d,%d)\n",
					hki.hw_key1, hki.hw_key2);
				return -EINVAL;
			}

			ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2);
			if (ctx_p->hw.key2_slot == END_OF_KEYS) {
				dev_err(dev, "Unsupported hw key2 number (%d)\n",
					hki.hw_key2);
				return -EINVAL;
			}
		}

		ctx_p->key_type = CC_HW_PROTECTED_KEY;
		dev_dbg(dev, "HW protected key %d/%d set.\n",
			ctx_p->hw.key1_slot, ctx_p->hw.key2_slot);
		break;

	case CC_POLICY_PROTECTED_KEY:
		if (ctx_p->drvdata->hw_rev < CC_HW_REV_713) {
			dev_err(dev, "CPP keys not supported in this hardware revision.\n");
			return -EINVAL;
		}

		if (ctx_p->cipher_mode != DRV_CIPHER_CBC &&
		    ctx_p->cipher_mode != DRV_CIPHER_CTR) {
			dev_err(dev, "CPP keys only supported in CBC or CTR modes.\n");
			return -EINVAL;
		}

		ctx_p->cpp.slot = cc_slot_to_cpp_key(hki.hw_key1);
		if (ctx_p->flow_mode == S_DIN_to_AES)
			ctx_p->cpp.alg = CC_CPP_AES;
		else /* Must be SM4 due to sethkey registration */
			ctx_p->cpp.alg = CC_CPP_SM4;
		ctx_p->key_type = CC_POLICY_PROTECTED_KEY;
		dev_dbg(dev, "policy protected key alg: %d slot: %d.\n",
			ctx_p->cpp.alg, ctx_p->cpp.slot);
		break;

	default:
		dev_err(dev, "Unsupported protected key (%d)\n", hki.hw_key1);
		return -EINVAL;
	}

	return 0;
}

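/*
 * .setkey handler for regular (unprotected) keys: validate the size,
 * reject weak DES/XTS keys, and copy the key into the pre-mapped DMA
 * buffer. 192-bit AES keys are zero-padded to CC_AES_KEY_SIZE_MAX to
 * match the full-size DIN used for them in cc_setup_key_desc(). For
 * ESSIV the upper half of the buffer is overwritten with sha256() of
 * the first half, computed in software.
 */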
static int cc_cipher_setkey(struct crypto_skcipher *sktfm, const u8 *key,
			    unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_crypto_alg *cc_alg =
		container_of(tfm->__crt_alg, struct cc_crypto_alg,
			     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;

	dev_dbg(dev, "Setting key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_dbg(dev, "Unsupported key size %d.\n", keylen);
		return -EINVAL;
	}

	ctx_p->key_type = CC_UNPROTECTED_KEY;

	/*
	 * Verify DES weak keys
	 * Note that we're dropping the expanded key since the
	 * HW does the expansion on its own.
	 */
	if (ctx_p->flow_mode == S_DIN_to_DES) {
		if ((keylen == DES3_EDE_KEY_SIZE &&
		     verify_skcipher_des3_key(sktfm, key)) ||
		    verify_skcipher_des_key(sktfm, key)) {
			dev_dbg(dev, "weak DES key");
			return -EINVAL;
		}
	}

	if (ctx_p->cipher_mode == DRV_CIPHER_XTS &&
	    xts_check_key(tfm, key, keylen)) {
		dev_dbg(dev, "weak XTS key");
		return -EINVAL;
	}

	/* STAT_PHASE_1: Copy key to ctx */
	dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
				max_key_buf_size, DMA_TO_DEVICE);

	memcpy(ctx_p->user.key, key, keylen);
	if (keylen == 24)
		memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24);

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* sha256 for key2 - use sw implementation */
		int key_len = keylen >> 1;
		int err;

		err = crypto_shash_tfm_digest(ctx_p->shash_tfm,
					      ctx_p->user.key, key_len,
					      ctx_p->user.key + key_len);
		if (err) {
			dev_err(dev, "Failed to hash ESSIV key.\n");
			return err;
		}
	}
	dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
				   max_key_buf_size, DMA_TO_DEVICE);
	ctx_p->keylen = keylen;

	dev_dbg(dev, "return safely");
	return 0;
}

static int cc_out_setup_mode(struct cc_cipher_ctx *ctx_p)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		return S_AES_to_DOUT;
	case S_DIN_to_DES:
		return S_DES_to_DOUT;
	case S_DIN_to_SM4:
		return S_SM4_to_DOUT;
	default:
		return ctx_p->flow_mode;
	}
}

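/*
 * Emit a descriptor that writes the engine's updated IV/counter back to
 * the request IV buffer once processing completes, so that chained
 * requests can continue the stream. Nothing is emitted for ECB, and the
 * whole step is skipped for CPP (policy protected) keys.
 */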
static void cc_setup_readiv_desc(struct crypto_tfm *tfm,
				 struct cipher_req_ctx *req_ctx,
				 unsigned int ivsize, struct cc_hw_desc desc[],
				 unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = cc_out_setup_mode(ctx_p);
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;

	if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY)
		return;

	switch (cipher_mode) {
	case DRV_CIPHER_ECB:
		break;
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		/* Read next IV */
		hw_desc_init(&desc[*seq_size]);
		set_dout_dlli(&desc[*seq_size], iv_dma_addr, ivsize, NS_BIT, 1);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		if (cipher_mode == DRV_CIPHER_CTR ||
		    cipher_mode == DRV_CIPHER_OFB) {
			set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
		} else {
			set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE0);
		}
		set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/* IV */
		hw_desc_init(&desc[*seq_size]);
		set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_dout_dlli(&desc[*seq_size], iv_dma_addr, CC_AES_BLOCK_SIZE,
			      NS_BIT, 1);
		set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

static void cc_setup_state_desc(struct crypto_tfm *tfm,
				struct cipher_req_ctx *req_ctx,
				unsigned int ivsize, unsigned int nbytes,
				struct cc_hw_desc desc[],
				unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;

	switch (cipher_mode) {
	case DRV_CIPHER_ECB:
		break;
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		/* Load IV */
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize,
			     NS_BIT);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		if (cipher_mode == DRV_CIPHER_CTR ||
		    cipher_mode == DRV_CIPHER_OFB) {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		} else {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
		}
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

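/*
 * For the XEX-based modes (XTS/ESSIV/BitLocker) load the second key
 * half as the XEX key and then the IV/tweak. The data-unit size
 * defaults to the request size unless the template pins it (the du512
 * and du4096 variants below).
 */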
static void cc_setup_xex_state_desc(struct crypto_tfm *tfm,
				    struct cipher_req_ctx *req_ctx,
				    unsigned int ivsize, unsigned int nbytes,
				    struct cc_hw_desc desc[],
				    unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
	unsigned int key_len = ctx_p->keylen;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
	unsigned int du_size = nbytes;

	struct cc_crypto_alg *cc_alg =
		container_of(tfm->__crt_alg, struct cc_crypto_alg,
			     skcipher_alg.base);

	if (cc_alg->data_unit)
		du_size = cc_alg->data_unit;

	switch (cipher_mode) {
	case DRV_CIPHER_ECB:
		break;
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/* load XEX key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key2_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI,
				     (key_dma_addr + (key_len / 2)),
				     (key_len / 2), NS_BIT);
		}
		set_xex_data_unit_size(&desc[*seq_size], du_size);
		set_flow_mode(&desc[*seq_size], S_DIN_to_AES2);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
		(*seq_size)++;

		/* Load IV */
		hw_desc_init(&desc[*seq_size]);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr,
			     CC_AES_BLOCK_SIZE, NS_BIT);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

static int cc_out_flow_mode(struct cc_cipher_ctx *ctx_p)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		return DIN_AES_DOUT;
	case S_DIN_to_DES:
		return DIN_DES_DOUT;
	case S_DIN_to_SM4:
		return DIN_SM4_DOUT;
	default:
		return ctx_p->flow_mode;
	}
}

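/*
 * Emit the key-load descriptor for key1: from an FDE slot for HW
 * protected keys, a CPP slot for policy protected keys, or the
 * DMA-mapped user key buffer otherwise. Non-AES flows (DES, SM4) use
 * the DES key-size coding here; everything else uses the AES coding.
 */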
static void cc_setup_key_desc(struct crypto_tfm *tfm,
			      struct cipher_req_ctx *req_ctx,
			      unsigned int nbytes, struct cc_hw_desc desc[],
			      unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
	unsigned int key_len = ctx_p->keylen;
	unsigned int din_size;

	switch (cipher_mode) {
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
	case DRV_CIPHER_ECB:
		/* Load key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);

		if (cc_key_type(tfm) == CC_POLICY_PROTECTED_KEY) {
			/* We use the AES key size coding for all CPP algs */
			set_key_size_aes(&desc[*seq_size], key_len);
			set_cpp_crypto_key(&desc[*seq_size], ctx_p->cpp.slot);
			flow_mode = cc_out_flow_mode(ctx_p);
		} else {
			if (flow_mode == S_DIN_to_AES) {
				if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
					set_hw_crypto_key(&desc[*seq_size],
							  ctx_p->hw.key1_slot);
				} else {
					/* CC_UNPROTECTED_KEY
					 * Invalid keys are filtered out in
					 * sethkey()
					 */
					din_size = (key_len == 24) ?
						AES_MAX_KEY_SIZE : key_len;

					set_din_type(&desc[*seq_size], DMA_DLLI,
						     key_dma_addr, din_size,
						     NS_BIT);
				}
				set_key_size_aes(&desc[*seq_size], key_len);
			} else {
				/* des */
				set_din_type(&desc[*seq_size], DMA_DLLI,
					     key_dma_addr, key_len, NS_BIT);
				set_key_size_des(&desc[*seq_size], key_len);
			}
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		}
		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/* Load AES key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key1_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
				     (key_len / 2), NS_BIT);
		}
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

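/*
 * When the buffer manager mapped the request as an MLLI (scatter/gather
 * table), emit a BYPASS descriptor that copies the MLLI table from host
 * memory into the engine's SRAM before the data descriptor references
 * it.
 */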
static void cc_setup_mlli_desc(struct crypto_tfm *tfm,
			       struct cipher_req_ctx *req_ctx,
			       struct scatterlist *dst, struct scatterlist *src,
			       unsigned int nbytes, void *areq,
			       struct cc_hw_desc desc[], unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);

	if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI) {
		/* bypass */
		dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n",
			&req_ctx->mlli_params.mlli_dma_addr,
			req_ctx->mlli_params.mlli_len,
			ctx_p->drvdata->mlli_sram_addr);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI,
			     req_ctx->mlli_params.mlli_dma_addr,
			     req_ctx->mlli_params.mlli_len, NS_BIT);
		set_dout_sram(&desc[*seq_size],
			      ctx_p->drvdata->mlli_sram_addr,
			      req_ctx->mlli_params.mlli_len);
		set_flow_mode(&desc[*seq_size], BYPASS);
		(*seq_size)++;
	}
}

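/*
 * Emit the data-movement descriptor, either DLLI (contiguous buffers)
 * or MLLI (scatter/gather via the SRAM-resident table). It is marked as
 * the queue's last descriptor only when no IV read-back descriptor
 * follows, i.e. for ECB or CPP keys.
 */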
static void cc_setup_flow_desc(struct crypto_tfm *tfm,
			       struct cipher_req_ctx *req_ctx,
			       struct scatterlist *dst, struct scatterlist *src,
			       unsigned int nbytes, struct cc_hw_desc desc[],
			       unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	unsigned int flow_mode = cc_out_flow_mode(ctx_p);
	bool last_desc = (ctx_p->key_type == CC_POLICY_PROTECTED_KEY ||
			  ctx_p->cipher_mode == DRV_CIPHER_ECB);

	/* Process */
	if (req_ctx->dma_buf_type == CC_DMA_BUF_DLLI) {
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(src), nbytes);
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(dst), nbytes);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src),
			     nbytes, NS_BIT);
		set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
			      nbytes, NS_BIT, (!last_desc ? 0 : 1));
		if (last_desc)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	} else {
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_MLLI,
			     ctx_p->drvdata->mlli_sram_addr,
			     req_ctx->in_mlli_nents, NS_BIT);
		if (req_ctx->out_nents == 0) {
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				ctx_p->drvdata->mlli_sram_addr,
				ctx_p->drvdata->mlli_sram_addr);
			set_dout_mlli(&desc[*seq_size],
				      ctx_p->drvdata->mlli_sram_addr,
				      req_ctx->in_mlli_nents, NS_BIT,
				      (!last_desc ? 0 : 1));
		} else {
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				ctx_p->drvdata->mlli_sram_addr,
				ctx_p->drvdata->mlli_sram_addr +
				(u32)LLI_ENTRY_BYTE_SIZE * req_ctx->in_nents);
			set_dout_mlli(&desc[*seq_size],
				      (ctx_p->drvdata->mlli_sram_addr +
				       (LLI_ENTRY_BYTE_SIZE *
					req_ctx->in_mlli_nents)),
				      req_ctx->out_mlli_nents, NS_BIT,
				      (!last_desc ? 0 : 1));
		}
		if (last_desc)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	}
}

static void cc_cipher_complete(struct device *dev, void *cc_req, int err)
{
	struct skcipher_request *req = (struct skcipher_request *)cc_req;
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);

	if (err != -EINPROGRESS) {
		/* Not a BACKLOG notification */
		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
		memcpy(req->iv, req_ctx->iv, ivsize);
		kzfree(req_ctx->iv);
	}

	skcipher_request_complete(req, err);
}

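/*
 * Core request path: validate the length, copy the possibly
 * stack-allocated IV into a DMAable buffer, map the data buffers, then
 * build the descriptor sequence (state/IV, MLLI bypass, key, XEX state,
 * data flow, IV read-back) and hand it to the request manager.
 * Completion, including unmapping, happens in cc_cipher_complete()
 * unless the submission fails outright.
 */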
static int cc_cipher_process(struct skcipher_request *req,
			     enum drv_crypto_direction direction)
{
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	unsigned int nbytes = req->cryptlen;
	void *iv = req->iv;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hw_desc desc[MAX_SKCIPHER_SEQ_LEN];
	struct cc_crypto_req cc_req = {};
	int rc;
	unsigned int seq_len = 0;
	gfp_t flags = cc_gfp_flags(&req->base);

	dev_dbg(dev, "%s req=%p iv=%p nbytes=%d\n",
		((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ?
		 "Encrypt" : "Decrypt"), req, iv, nbytes);

	/* STAT_PHASE_0: Init and sanity checks */

	if (validate_data_size(ctx_p, nbytes)) {
		dev_dbg(dev, "Unsupported data size %d.\n", nbytes);
		rc = -EINVAL;
		goto exit_process;
	}
	if (nbytes == 0) {
		/* No data to process is valid */
		rc = 0;
		goto exit_process;
	}

	/* The IV we are handed may be allocated from the stack so
	 * we must copy it to a DMAable buffer before use.
	 */
	req_ctx->iv = kmemdup(iv, ivsize, flags);
	if (!req_ctx->iv) {
		rc = -ENOMEM;
		goto exit_process;
	}

	/* Setup request structure */
	cc_req.user_cb = cc_cipher_complete;
	cc_req.user_arg = req;

	/* Setup CPP operation details */
	if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY) {
		cc_req.cpp.is_cpp = true;
		cc_req.cpp.alg = ctx_p->cpp.alg;
		cc_req.cpp.slot = ctx_p->cpp.slot;
	}

	/* Setup request context */
	req_ctx->gen_ctx.op_type = direction;

	/* STAT_PHASE_1: Map buffers */

	rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes,
				   req_ctx->iv, src, dst, flags);
	if (rc) {
		dev_err(dev, "map_request() failed\n");
		goto exit_process;
	}

	/* STAT_PHASE_2: Create sequence */

	/* Setup state (IV) */
	cc_setup_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
	/* Setup MLLI line, if needed */
	cc_setup_mlli_desc(tfm, req_ctx, dst, src, nbytes, req, desc, &seq_len);
	/* Setup key */
	cc_setup_key_desc(tfm, req_ctx, nbytes, desc, &seq_len);
	/* Setup state (IV and XEX key) */
	cc_setup_xex_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
	/* Data processing */
	cc_setup_flow_desc(tfm, req_ctx, dst, src, nbytes, desc, &seq_len);
	/* Read next IV */
	cc_setup_readiv_desc(tfm, req_ctx, ivsize, desc, &seq_len);

	/* STAT_PHASE_3: Lock HW and push sequence */

	rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len,
			     &req->base);
	if (rc != -EINPROGRESS && rc != -EBUSY) {
		/* Failed to send the request or request completed
		 * synchronously
		 */
		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
	}

exit_process:
	if (rc != -EINPROGRESS && rc != -EBUSY) {
		kzfree(req_ctx->iv);
	}

	return rc;
}

static int cc_cipher_encrypt(struct skcipher_request *req)
{
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

	memset(req_ctx, 0, sizeof(*req_ctx));

	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_ENCRYPT);
}

static int cc_cipher_decrypt(struct skcipher_request *req)
{
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

	memset(req_ctx, 0, sizeof(*req_ctx));

	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_DECRYPT);
}

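/*
 * Template table for every skcipher this driver can expose. Entries are
 * filtered at probe time in cc_cipher_alloc() by minimum HW revision,
 * standards body (NIST vs. OSCCA) and, for entries marked sec_func, by
 * the security-disabled state. The "paes"/"psm4" templates take
 * protected key tokens via cc_cipher_sethkey().
 */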
/* Block cipher alg */
static const struct cc_alg_template skcipher_algs[] = {
	{
		.name = "xts(paes)",
		.driver_name = "xts-paes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "xts512(paes)",
		.driver_name = "xts-paes-du512-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "xts4096(paes)",
		.driver_name = "xts-paes-du4096-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "essiv(paes)",
		.driver_name = "essiv-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "essiv512(paes)",
		.driver_name = "essiv-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "essiv4096(paes)",
		.driver_name = "essiv-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "bitlocker(paes)",
		.driver_name = "bitlocker-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "bitlocker512(paes)",
		.driver_name = "bitlocker-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "bitlocker4096(paes)",
		.driver_name = "bitlocker-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "ecb(paes)",
		.driver_name = "ecb-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = 0,
		},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "cbc(paes)",
		.driver_name = "cbc-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "ofb(paes)",
		.driver_name = "ofb-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_OFB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "cts(cbc(paes))",
		.driver_name = "cts-cbc-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "ctr(paes)",
		.driver_name = "ctr-paes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		/* See https://www.mail-archive.com/linux-crypto@vger.kernel.org/msg40576.html
		 * for the reason why this differs from the generic
		 * implementation.
		 */
		.name = "xts(aes)",
		.driver_name = "xts-aes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "xts512(aes)",
		.driver_name = "xts-aes-du512-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "xts4096(aes)",
		.driver_name = "xts-aes-du4096-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "essiv(aes)",
		.driver_name = "essiv-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "essiv512(aes)",
		.driver_name = "essiv-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "essiv4096(aes)",
		.driver_name = "essiv-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "bitlocker(aes)",
		.driver_name = "bitlocker-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "bitlocker512(aes)",
		.driver_name = "bitlocker-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "bitlocker4096(aes)",
		.driver_name = "bitlocker-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ecb(aes)",
		.driver_name = "ecb-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = 0,
		},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(aes)",
		.driver_name = "cbc-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ofb(aes)",
		.driver_name = "ofb-aes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_OFB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cts(cbc(aes))",
		.driver_name = "cts-cbc-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ctr(aes)",
		.driver_name = "ctr-aes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(des3_ede)",
		.driver_name = "cbc-3des-ccree",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ecb(des3_ede)",
		.driver_name = "ecb-3des-ccree",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = 0,
		},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(des)",
		.driver_name = "cbc-des-ccree",
		.blocksize = DES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ecb(des)",
		.driver_name = "ecb-des-ccree",
		.blocksize = DES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = 0,
		},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(sm4)",
		.driver_name = "cbc-sm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = SM4_KEY_SIZE,
			.max_keysize = SM4_KEY_SIZE,
			.ivsize = SM4_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
	},
	{
		.name = "ecb(sm4)",
		.driver_name = "ecb-sm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = SM4_KEY_SIZE,
			.max_keysize = SM4_KEY_SIZE,
			.ivsize = 0,
		},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
	},
	{
		.name = "ctr(sm4)",
		.driver_name = "ctr-sm4-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = SM4_KEY_SIZE,
			.max_keysize = SM4_KEY_SIZE,
			.ivsize = SM4_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
	},
	{
		.name = "cbc(psm4)",
		.driver_name = "cbc-psm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = SM4_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
		.sec_func = true,
	},
	{
		.name = "ctr(psm4)",
		.driver_name = "ctr-psm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = SM4_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
		.sec_func = true,
	},
};

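/*
 * Instantiate a cc_crypto_alg from a template: copy the skcipher ops
 * and fill in the crypto_alg base fields (names, priority, block size,
 * context size and the init/exit hooks above).
 */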
static struct cc_crypto_alg *cc_create_alg(const struct cc_alg_template *tmpl,
					   struct device *dev)
{
	struct cc_crypto_alg *t_alg;
	struct skcipher_alg *alg;

	t_alg = devm_kzalloc(dev, sizeof(*t_alg), GFP_KERNEL);
	if (!t_alg)
		return ERR_PTR(-ENOMEM);

	alg = &t_alg->skcipher_alg;

	memcpy(alg, &tmpl->template_skcipher, sizeof(*alg));

	snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name);
	snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 tmpl->driver_name);
	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CC_CRA_PRIO;
	alg->base.cra_blocksize = tmpl->blocksize;
	alg->base.cra_alignmask = 0;
	alg->base.cra_ctxsize = sizeof(struct cc_cipher_ctx);

	alg->base.cra_init = cc_cipher_init;
	alg->base.cra_exit = cc_cipher_exit;
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	t_alg->cipher_mode = tmpl->cipher_mode;
	t_alg->flow_mode = tmpl->flow_mode;
	t_alg->data_unit = tmpl->data_unit;

	return t_alg;
}

int cc_cipher_free(struct cc_drvdata *drvdata)
{
	struct cc_crypto_alg *t_alg, *n;

	/* Remove registered algs */
	list_for_each_entry_safe(t_alg, n, &drvdata->alg_list, entry) {
		crypto_unregister_skcipher(&t_alg->skcipher_alg);
		list_del(&t_alg->entry);
	}
	return 0;
}

int cc_cipher_alloc(struct cc_drvdata *drvdata)
{
	struct cc_crypto_alg *t_alg;
	struct device *dev = drvdata_to_dev(drvdata);
	int rc = -ENOMEM;
	int alg;

	INIT_LIST_HEAD(&drvdata->alg_list);

	/* Linux crypto */
	dev_dbg(dev, "Number of algorithms = %zu\n",
		ARRAY_SIZE(skcipher_algs));
	for (alg = 0; alg < ARRAY_SIZE(skcipher_algs); alg++) {
		if ((skcipher_algs[alg].min_hw_rev > drvdata->hw_rev) ||
		    !(drvdata->std_bodies & skcipher_algs[alg].std_body) ||
		    (drvdata->sec_disabled && skcipher_algs[alg].sec_func))
			continue;

		dev_dbg(dev, "creating %s\n", skcipher_algs[alg].driver_name);
		t_alg = cc_create_alg(&skcipher_algs[alg], dev);
		if (IS_ERR(t_alg)) {
			rc = PTR_ERR(t_alg);
			dev_err(dev, "%s alg allocation failed\n",
				skcipher_algs[alg].driver_name);
			goto fail0;
		}
		t_alg->drvdata = drvdata;

		dev_dbg(dev, "registering %s\n",
			skcipher_algs[alg].driver_name);
		rc = crypto_register_skcipher(&t_alg->skcipher_alg);
		dev_dbg(dev, "%s alg registration rc = %x\n",
			t_alg->skcipher_alg.base.cra_driver_name, rc);
		if (rc) {
			dev_err(dev, "%s alg registration failed\n",
				t_alg->skcipher_alg.base.cra_driver_name);
			goto fail0;
		}

		list_add_tail(&t_alg->entry, &drvdata->alg_list);
		dev_dbg(dev, "Registered %s\n",
			t_alg->skcipher_alg.base.cra_driver_name);
	}
	return 0;

fail0:
	cc_cipher_free(drvdata);
	return rc;
}