// SPDX-License-Identifier: GPL-2.0-only
/*
 * Crypto acceleration support for Rockchip RK3288
 *
 * Copyright (c) 2015, Fuzhou Rockchip Electronics Co., Ltd
 *
 * Author: Zain Wang <zain.wang@rock-chips.com>
 *
 * Some ideas are from marvell-cesa.c and s5p-sss.c driver.
 */
#include "rk3288_crypto.h"

#define RK_CRYPTO_DEC BIT(0)

static void rk_crypto_complete(struct crypto_async_request *base, int err)
{
        if (base->complete)
                base->complete(base, err);
}

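/*
 * Reject requests whose length is not a multiple of the hardware alignment;
 * otherwise hand the request to the device's enqueue callback.
 */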
static int rk_handle_req(struct rk_crypto_info *dev,
                         struct ablkcipher_request *req)
{
        if (!IS_ALIGNED(req->nbytes, dev->align_size))
                return -EINVAL;
        else
                return dev->enqueue(dev, &req->base);
}

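/* Validate the AES key length and copy the key into the AES key registers. */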
static int rk_aes_setkey(struct crypto_ablkcipher *cipher,
                         const u8 *key, unsigned int keylen)
{
        struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
        struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
            keylen != AES_KEYSIZE_256) {
                crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }
        ctx->keylen = keylen;
        memcpy_toio(ctx->dev->reg + RK_CRYPTO_AES_KEY_0, key, keylen);
        return 0;
}

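/*
 * Expand the DES key to detect weak keys; reject them when the caller
 * forbids weak keys, otherwise write the key into the TDES key registers.
 */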
static int rk_des_setkey(struct crypto_ablkcipher *cipher,
                         const u8 *key, unsigned int keylen)
{
        struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
        struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        u32 tmp[DES_EXPKEY_WORDS];

        if (!des_ekey(tmp, key) &&
            (tfm->crt_flags & CRYPTO_TFM_REQ_FORBID_WEAK_KEYS)) {
                tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
                return -EINVAL;
        }

        ctx->keylen = keylen;
        memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_KEY1_0, key, keylen);
        return 0;
}

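/* Verify the triple-DES key and write it into the TDES key registers. */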
static int rk_tdes_setkey(struct crypto_ablkcipher *cipher,
                          const u8 *key, unsigned int keylen)
{
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(cipher);
        u32 flags;
        int err;

        flags = crypto_ablkcipher_get_flags(cipher);
        err = __des3_verify_key(&flags, key);
        if (unlikely(err)) {
                crypto_ablkcipher_set_flags(cipher, flags);
                return err;
        }

        ctx->keylen = keylen;
        memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_KEY1_0, key, keylen);
        return 0;
}

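/*
 * The encrypt/decrypt entry points below only select the cipher, chaining
 * mode and direction bits in ctx->mode and then queue the request through
 * rk_handle_req(); the hardware is programmed later in rk_ablk_hw_init().
 */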
static int rk_aes_ecb_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_AES_ECB_MODE;
        return rk_handle_req(dev, req);
}

static int rk_aes_ecb_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_AES_ECB_MODE | RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

static int rk_aes_cbc_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_AES_CBC_MODE;
        return rk_handle_req(dev, req);
}

static int rk_aes_cbc_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_AES_CBC_MODE | RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

static int rk_des_ecb_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = 0;
        return rk_handle_req(dev, req);
}

static int rk_des_ecb_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

static int rk_des_cbc_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC;
        return rk_handle_req(dev, req);
}

static int rk_des_cbc_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC | RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

static int rk_des3_ede_ecb_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_TDES_SELECT;
        return rk_handle_req(dev, req);
}

static int rk_des3_ede_ecb_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

static int rk_des3_ede_cbc_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC;
        return rk_handle_req(dev, req);
}

static int rk_des3_ede_cbc_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC |
                    RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

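/*
 * Program the (T)DES or AES control register and IV for the current request,
 * then enable the block-cipher DMA done and error interrupts.
 */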
static void rk_ablk_hw_init(struct rk_crypto_info *dev)
{
        struct ablkcipher_request *req =
                ablkcipher_request_cast(dev->async_req);
        struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(req);
        struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(cipher);
        u32 ivsize, block, conf_reg = 0;

        block = crypto_tfm_alg_blocksize(tfm);
        ivsize = crypto_ablkcipher_ivsize(cipher);

        if (block == DES_BLOCK_SIZE) {
                ctx->mode |= RK_CRYPTO_TDES_FIFO_MODE |
                             RK_CRYPTO_TDES_BYTESWAP_KEY |
                             RK_CRYPTO_TDES_BYTESWAP_IV;
                CRYPTO_WRITE(dev, RK_CRYPTO_TDES_CTRL, ctx->mode);
                memcpy_toio(dev->reg + RK_CRYPTO_TDES_IV_0, req->info, ivsize);
                conf_reg = RK_CRYPTO_DESSEL;
        } else {
                ctx->mode |= RK_CRYPTO_AES_FIFO_MODE |
                             RK_CRYPTO_AES_KEY_CHANGE |
                             RK_CRYPTO_AES_BYTESWAP_KEY |
                             RK_CRYPTO_AES_BYTESWAP_IV;
                if (ctx->keylen == AES_KEYSIZE_192)
                        ctx->mode |= RK_CRYPTO_AES_192BIT_key;
                else if (ctx->keylen == AES_KEYSIZE_256)
                        ctx->mode |= RK_CRYPTO_AES_256BIT_key;
                CRYPTO_WRITE(dev, RK_CRYPTO_AES_CTRL, ctx->mode);
                memcpy_toio(dev->reg + RK_CRYPTO_AES_IV_0, req->info, ivsize);
        }
        conf_reg |= RK_CRYPTO_BYTESWAP_BTFIFO |
                    RK_CRYPTO_BYTESWAP_BRFIFO;
        CRYPTO_WRITE(dev, RK_CRYPTO_CONF, conf_reg);
        CRYPTO_WRITE(dev, RK_CRYPTO_INTENA,
                     RK_CRYPTO_BCDMA_ERR_ENA | RK_CRYPTO_BCDMA_DONE_ENA);
}

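/*
 * Load the source/destination DMA addresses and the transfer length in
 * 32-bit words, then kick off the block-cipher operation.
 */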
static void crypto_dma_start(struct rk_crypto_info *dev)
{
        CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAS, dev->addr_in);
        CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAL, dev->count / 4);
        CRYPTO_WRITE(dev, RK_CRYPTO_BTDMAS, dev->addr_out);
        CRYPTO_WRITE(dev, RK_CRYPTO_CTRL, RK_CRYPTO_BLOCK_START |
                     _SBF(RK_CRYPTO_BLOCK_START, 16));
}

static int rk_set_data_start(struct rk_crypto_info *dev)
{
        int err;
        struct ablkcipher_request *req =
                ablkcipher_request_cast(dev->async_req);
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        u32 ivsize = crypto_ablkcipher_ivsize(tfm);
        u8 *src_last_blk = page_address(sg_page(dev->sg_src)) +
                dev->sg_src->offset + dev->sg_src->length - ivsize;

        /*
         * Store the IV that needs to be updated in chain mode, and update
         * the IV buffer to contain the next IV for decryption mode.
         */
        if (ctx->mode & RK_CRYPTO_DEC) {
                memcpy(ctx->iv, src_last_blk, ivsize);
                sg_pcopy_to_buffer(dev->first, dev->src_nents, req->info,
                                   ivsize, dev->total - ivsize);
        }

        err = dev->load_data(dev, dev->sg_src, dev->sg_dst);
        if (!err)
                crypto_dma_start(dev);
        return err;
}

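/*
 * Set up the per-request bookkeeping (byte counts, scatterlists), program
 * the hardware and start the first DMA chunk under the device lock.
 */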
static int rk_ablk_start(struct rk_crypto_info *dev)
{
        struct ablkcipher_request *req =
                ablkcipher_request_cast(dev->async_req);
        unsigned long flags;
        int err = 0;

        dev->left_bytes = req->nbytes;
        dev->total = req->nbytes;
        dev->sg_src = req->src;
        dev->first = req->src;
        dev->src_nents = sg_nents(req->src);
        dev->sg_dst = req->dst;
        dev->dst_nents = sg_nents(req->dst);
        dev->aligned = 1;

        spin_lock_irqsave(&dev->lock, flags);
        rk_ablk_hw_init(dev);
        err = rk_set_data_start(dev);
        spin_unlock_irqrestore(&dev->lock, flags);
        return err;
}

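/*
 * After encryption completes, copy the last ciphertext block back into
 * req->info so a chained request can continue with the correct IV.
 */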
static void rk_iv_copyback(struct rk_crypto_info *dev)
{
        struct ablkcipher_request *req =
                ablkcipher_request_cast(dev->async_req);
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        u32 ivsize = crypto_ablkcipher_ivsize(tfm);

        /* Update the IV buffer to contain the next IV for encryption mode. */
        if (!(ctx->mode & RK_CRYPTO_DEC)) {
                if (dev->aligned) {
                        memcpy(req->info, sg_virt(dev->sg_dst) +
                                dev->sg_dst->length - ivsize, ivsize);
                } else {
                        memcpy(req->info, dev->addr_vir +
                                dev->count - ivsize, ivsize);
                }
        }
}

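/*
 * Reload the hardware IV register with the IV for the next scatterlist
 * chunk: the saved ciphertext block when decrypting, or the last output
 * block when encrypting.
 */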
static void rk_update_iv(struct rk_crypto_info *dev)
{
        struct ablkcipher_request *req =
                ablkcipher_request_cast(dev->async_req);
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        u32 ivsize = crypto_ablkcipher_ivsize(tfm);
        u8 *new_iv = NULL;

        if (ctx->mode & RK_CRYPTO_DEC) {
                new_iv = ctx->iv;
        } else {
                new_iv = page_address(sg_page(dev->sg_dst)) +
                         dev->sg_dst->offset + dev->sg_dst->length - ivsize;
        }

        if (ivsize == DES_BLOCK_SIZE)
                memcpy_toio(dev->reg + RK_CRYPTO_TDES_IV_0, new_iv, ivsize);
        else if (ivsize == AES_BLOCK_SIZE)
                memcpy_toio(dev->reg + RK_CRYPTO_AES_IV_0, new_iv, ivsize);
}

/*
 * Return:
 *	non-zero	an error occurred
 *	0		no error, continue
 */
static int rk_ablk_rx(struct rk_crypto_info *dev)
{
        int err = 0;
        struct ablkcipher_request *req =
                ablkcipher_request_cast(dev->async_req);

        dev->unload_data(dev);
        if (!dev->aligned) {
                if (!sg_pcopy_from_buffer(req->dst, dev->dst_nents,
                                          dev->addr_vir, dev->count,
                                          dev->total - dev->left_bytes -
                                          dev->count)) {
                        err = -EINVAL;
                        goto out_rx;
                }
        }
        if (dev->left_bytes) {
                rk_update_iv(dev);
                if (dev->aligned) {
                        if (sg_is_last(dev->sg_src)) {
                                dev_err(dev->dev, "[%s:%d] Lack of data\n",
                                        __func__, __LINE__);
                                err = -ENOMEM;
                                goto out_rx;
                        }
                        dev->sg_src = sg_next(dev->sg_src);
                        dev->sg_dst = sg_next(dev->sg_dst);
                }
                err = rk_set_data_start(dev);
        } else {
                rk_iv_copyback(dev);
                /* The whole request has completed without error. */
                dev->complete(dev->async_req, 0);
                tasklet_schedule(&dev->queue_task);
        }
out_rx:
        return err;
}

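/*
 * Per-transform init: bind the context to the crypto device, install the
 * start/update/complete callbacks, allocate a bounce page for unaligned
 * data and enable the device clocks.
 */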
static int rk_ablk_cra_init(struct crypto_tfm *tfm)
{
        struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct crypto_alg *alg = tfm->__crt_alg;
        struct rk_crypto_tmp *algt;

        algt = container_of(alg, struct rk_crypto_tmp, alg.crypto);

        ctx->dev = algt->dev;
        ctx->dev->align_size = crypto_tfm_alg_alignmask(tfm) + 1;
        ctx->dev->start = rk_ablk_start;
        ctx->dev->update = rk_ablk_rx;
        ctx->dev->complete = rk_crypto_complete;
        ctx->dev->addr_vir = (char *)__get_free_page(GFP_KERNEL);

        return ctx->dev->addr_vir ? ctx->dev->enable_clk(ctx->dev) : -ENOMEM;
}

static void rk_ablk_cra_exit(struct crypto_tfm *tfm)
{
        struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        free_page((unsigned long)ctx->dev->addr_vir);
        ctx->dev->disable_clk(ctx->dev);
}

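/* Exported ablkcipher algorithm definitions for the RK3288 crypto engine. */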
struct rk_crypto_tmp rk_ecb_aes_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name = "ecb(aes)",
                .cra_driver_name = "ecb-aes-rk",
                .cra_priority = 300,
                .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                             CRYPTO_ALG_ASYNC,
                .cra_blocksize = AES_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct rk_cipher_ctx),
                .cra_alignmask = 0x0f,
                .cra_type = &crypto_ablkcipher_type,
                .cra_module = THIS_MODULE,
                .cra_init = rk_ablk_cra_init,
                .cra_exit = rk_ablk_cra_exit,
                .cra_u.ablkcipher = {
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .setkey = rk_aes_setkey,
                        .encrypt = rk_aes_ecb_encrypt,
                        .decrypt = rk_aes_ecb_decrypt,
                }
        }
};

struct rk_crypto_tmp rk_cbc_aes_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name = "cbc(aes)",
                .cra_driver_name = "cbc-aes-rk",
                .cra_priority = 300,
                .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                             CRYPTO_ALG_ASYNC,
                .cra_blocksize = AES_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct rk_cipher_ctx),
                .cra_alignmask = 0x0f,
                .cra_type = &crypto_ablkcipher_type,
                .cra_module = THIS_MODULE,
                .cra_init = rk_ablk_cra_init,
                .cra_exit = rk_ablk_cra_exit,
                .cra_u.ablkcipher = {
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                        .setkey = rk_aes_setkey,
                        .encrypt = rk_aes_cbc_encrypt,
                        .decrypt = rk_aes_cbc_decrypt,
                }
        }
};

struct rk_crypto_tmp rk_ecb_des_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name = "ecb(des)",
                .cra_driver_name = "ecb-des-rk",
                .cra_priority = 300,
                .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                             CRYPTO_ALG_ASYNC,
                .cra_blocksize = DES_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct rk_cipher_ctx),
                .cra_alignmask = 0x07,
                .cra_type = &crypto_ablkcipher_type,
                .cra_module = THIS_MODULE,
                .cra_init = rk_ablk_cra_init,
                .cra_exit = rk_ablk_cra_exit,
                .cra_u.ablkcipher = {
                        .min_keysize = DES_KEY_SIZE,
                        .max_keysize = DES_KEY_SIZE,
                        .setkey = rk_des_setkey,
                        .encrypt = rk_des_ecb_encrypt,
                        .decrypt = rk_des_ecb_decrypt,
                }
        }
};

struct rk_crypto_tmp rk_cbc_des_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name = "cbc(des)",
                .cra_driver_name = "cbc-des-rk",
                .cra_priority = 300,
                .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                             CRYPTO_ALG_ASYNC,
                .cra_blocksize = DES_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct rk_cipher_ctx),
                .cra_alignmask = 0x07,
                .cra_type = &crypto_ablkcipher_type,
                .cra_module = THIS_MODULE,
                .cra_init = rk_ablk_cra_init,
                .cra_exit = rk_ablk_cra_exit,
                .cra_u.ablkcipher = {
                        .min_keysize = DES_KEY_SIZE,
                        .max_keysize = DES_KEY_SIZE,
                        .ivsize = DES_BLOCK_SIZE,
                        .setkey = rk_des_setkey,
                        .encrypt = rk_des_cbc_encrypt,
                        .decrypt = rk_des_cbc_decrypt,
                }
        }
};

struct rk_crypto_tmp rk_ecb_des3_ede_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name = "ecb(des3_ede)",
                .cra_driver_name = "ecb-des3-ede-rk",
                .cra_priority = 300,
                .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                             CRYPTO_ALG_ASYNC,
                .cra_blocksize = DES_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct rk_cipher_ctx),
                .cra_alignmask = 0x07,
                .cra_type = &crypto_ablkcipher_type,
                .cra_module = THIS_MODULE,
                .cra_init = rk_ablk_cra_init,
                .cra_exit = rk_ablk_cra_exit,
                .cra_u.ablkcipher = {
                        .min_keysize = DES3_EDE_KEY_SIZE,
                        .max_keysize = DES3_EDE_KEY_SIZE,
                        .ivsize = DES_BLOCK_SIZE,
                        .setkey = rk_tdes_setkey,
                        .encrypt = rk_des3_ede_ecb_encrypt,
                        .decrypt = rk_des3_ede_ecb_decrypt,
                }
        }
};

struct rk_crypto_tmp rk_cbc_des3_ede_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name = "cbc(des3_ede)",
                .cra_driver_name = "cbc-des3-ede-rk",
                .cra_priority = 300,
                .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                             CRYPTO_ALG_ASYNC,
                .cra_blocksize = DES_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct rk_cipher_ctx),
                .cra_alignmask = 0x07,
                .cra_type = &crypto_ablkcipher_type,
                .cra_module = THIS_MODULE,
                .cra_init = rk_ablk_cra_init,
                .cra_exit = rk_ablk_cra_exit,
                .cra_u.ablkcipher = {
                        .min_keysize = DES3_EDE_KEY_SIZE,
                        .max_keysize = DES3_EDE_KEY_SIZE,
                        .ivsize = DES_BLOCK_SIZE,
                        .setkey = rk_tdes_setkey,
                        .encrypt = rk_des3_ede_cbc_encrypt,
                        .decrypt = rk_des3_ede_cbc_decrypt,
                }
        }
};