src/patches/padlock-prereq-2.6.16.diff
1Merge master.kernel.org:/pub/scm/linux/kernel/git/herbert/crypto-2.6
2
3* master.kernel.org:/pub/scm/linux/kernel/git/herbert/crypto-2.6:
4 [CRYPTO] aes: Fixed array boundary violation
5 [CRYPTO] tcrypt: Fix key alignment
6 [CRYPTO] all: Add missing cra_alignmask
7 [CRYPTO] all: Use kzalloc where possible
8 [CRYPTO] api: Align tfm context as wide as possible
9 [CRYPTO] twofish: Use rol32/ror32 where appropriate
10
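The hunks below all apply one conversion: cipher, digest and compression callbacks take a struct crypto_tfm * instead of a bare context pointer, and each implementation recovers its private state with crypto_tfm_ctx(tfm). A minimal sketch of a new-style setkey callback, using illustrative names (example_ctx, example_setkey) that are not part of this patch:

    #include <linux/crypto.h>
    #include <linux/string.h>
    #include <linux/types.h>

    struct example_ctx {
            u32 key[8];
            unsigned int keylen;
    };

    /* Before this change the prototype was
     *   int (*cia_setkey)(void *ctx, const u8 *key, unsigned int keylen, u32 *flags);
     * and the core passed crypto_tfm_ctx(tfm) as ctx.  Now the tfm itself is
     * passed and the implementation looks up its own context. */
    static int example_setkey(struct crypto_tfm *tfm, const u8 *in_key,
                              unsigned int key_len, u32 *flags)
    {
            struct example_ctx *ctx = crypto_tfm_ctx(tfm);

            if (key_len > sizeof(ctx->key)) {
                    *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                    return -EINVAL;
            }

            memcpy(ctx->key, in_key, key_len);
            ctx->keylen = key_len;
            return 0;
    }

The same interface is what the PadLock AES driver further down relies on when it realigns its context through aes_ctx(tfm).
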
11Index: linux-2.6.16.50/arch/x86_64/crypto/aes.c
12===================================================================
13--- linux-2.6.16.50.orig/arch/x86_64/crypto/aes.c 2006-07-14 18:09:26.335435750 +1200
14+++ linux-2.6.16.50/arch/x86_64/crypto/aes.c 2006-07-14 18:10:31.083482250 +1200
15@@ -77,12 +77,11 @@
16 struct aes_ctx
17 {
18 u32 key_length;
19- u32 E[60];
20- u32 D[60];
21+ u32 buf[120];
22 };
23
24-#define E_KEY ctx->E
25-#define D_KEY ctx->D
26+#define E_KEY (&ctx->buf[0])
27+#define D_KEY (&ctx->buf[60])
28
29 static u8 pow_tab[256] __initdata;
30 static u8 log_tab[256] __initdata;
31@@ -228,10 +227,10 @@
32 t ^= E_KEY[8 * i + 7]; E_KEY[8 * i + 15] = t; \
33 }
34
35-static int aes_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len,
36- u32 *flags)
37+static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
38+ unsigned int key_len, u32 *flags)
39 {
40- struct aes_ctx *ctx = ctx_arg;
41+ struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
42 const __le32 *key = (const __le32 *)in_key;
43 u32 i, j, t, u, v, w;
44
45@@ -284,8 +283,18 @@
46 return 0;
47 }
48
49-extern void aes_encrypt(void *ctx_arg, u8 *out, const u8 *in);
50-extern void aes_decrypt(void *ctx_arg, u8 *out, const u8 *in);
51+asmlinkage void aes_enc_blk(struct crypto_tfm *tfm, u8 *out, const u8 *in);
52+asmlinkage void aes_dec_blk(struct crypto_tfm *tfm, u8 *out, const u8 *in);
53+
54+static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
55+{
56+ aes_enc_blk(tfm, dst, src);
57+}
58+
59+static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
60+{
61+ aes_dec_blk(tfm, dst, src);
62+}
63
64 static struct crypto_alg aes_alg = {
65 .cra_name = "aes",
66Index: linux-2.6.16.50/crypto/aes.c
67===================================================================
68--- linux-2.6.16.50.orig/crypto/aes.c 2006-07-14 18:09:26.351436750 +1200
69+++ linux-2.6.16.50/crypto/aes.c 2006-07-14 18:10:31.087482500 +1200
70@@ -75,12 +75,11 @@
71
72 struct aes_ctx {
73 int key_length;
74- u32 E[60];
75- u32 D[60];
76+ u32 buf[120];
77 };
78
79-#define E_KEY ctx->E
80-#define D_KEY ctx->D
81+#define E_KEY (&ctx->buf[0])
82+#define D_KEY (&ctx->buf[60])
83
84 static u8 pow_tab[256] __initdata;
85 static u8 log_tab[256] __initdata;
86@@ -249,10 +248,10 @@
87 t ^= E_KEY[8 * i + 7]; E_KEY[8 * i + 15] = t; \
88 }
89
90-static int
91-aes_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len, u32 *flags)
92+static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
93+ unsigned int key_len, u32 *flags)
94 {
95- struct aes_ctx *ctx = ctx_arg;
96+ struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
97 const __le32 *key = (const __le32 *)in_key;
98 u32 i, t, u, v, w;
99
100@@ -319,9 +318,9 @@
101 f_rl(bo, bi, 2, k); \
102 f_rl(bo, bi, 3, k)
103
104-static void aes_encrypt(void *ctx_arg, u8 *out, const u8 *in)
105+static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
106 {
107- const struct aes_ctx *ctx = ctx_arg;
108+ const struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
109 const __le32 *src = (const __le32 *)in;
110 __le32 *dst = (__le32 *)out;
111 u32 b0[4], b1[4];
112@@ -374,9 +373,9 @@
113 i_rl(bo, bi, 2, k); \
114 i_rl(bo, bi, 3, k)
115
116-static void aes_decrypt(void *ctx_arg, u8 *out, const u8 *in)
117+static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
118 {
119- const struct aes_ctx *ctx = ctx_arg;
120+ const struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
121 const __le32 *src = (const __le32 *)in;
122 __le32 *dst = (__le32 *)out;
123 u32 b0[4], b1[4];
124Index: linux-2.6.16.50/crypto/api.c
125===================================================================
126--- linux-2.6.16.50.orig/crypto/api.c 2006-07-14 18:09:26.351436750 +1200
127+++ linux-2.6.16.50/crypto/api.c 2006-07-14 18:10:31.091482750 +1200
128@@ -165,7 +165,7 @@
129 break;
130 }
131
132- return len + alg->cra_alignmask;
133+ return len + (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
134 }
135
136 struct crypto_tfm *crypto_alloc_tfm(const char *name, u32 flags)
137@@ -179,24 +179,25 @@
138 goto out;
139
140 tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, flags);
141- tfm = kmalloc(tfm_size, GFP_KERNEL);
142+ tfm = kzalloc(tfm_size, GFP_KERNEL);
143 if (tfm == NULL)
144 goto out_put;
145
146- memset(tfm, 0, tfm_size);
147-
148 tfm->__crt_alg = alg;
149
150 if (crypto_init_flags(tfm, flags))
151 goto out_free_tfm;
152
153- if (crypto_init_ops(tfm)) {
154- crypto_exit_ops(tfm);
155+ if (crypto_init_ops(tfm))
156 goto out_free_tfm;
157- }
158+
159+ if (alg->cra_init && alg->cra_init(tfm))
160+ goto cra_init_failed;
161
162 goto out;
163
164+cra_init_failed:
165+ crypto_exit_ops(tfm);
166 out_free_tfm:
167 kfree(tfm);
168 tfm = NULL;
169@@ -217,6 +218,8 @@
170 alg = tfm->__crt_alg;
171 size = sizeof(*tfm) + alg->cra_ctxsize;
172
173+ if (alg->cra_exit)
174+ alg->cra_exit(tfm);
175 crypto_exit_ops(tfm);
176 crypto_alg_put(alg);
177 memset(tfm, 0, size);
178@@ -226,7 +229,7 @@
179 static inline int crypto_set_driver_name(struct crypto_alg *alg)
180 {
181 static const char suffix[] = "-generic";
182- char *driver_name = (char *)alg->cra_driver_name;
183+ char *driver_name = alg->cra_driver_name;
184 int len;
185
186 if (*driver_name)
187@@ -264,13 +267,13 @@
188 down_write(&crypto_alg_sem);
189
190 list_for_each_entry(q, &crypto_alg_list, cra_list) {
191- if (!strcmp(q->cra_driver_name, alg->cra_driver_name)) {
192+ if (q == alg) {
193 ret = -EEXIST;
194 goto out;
195 }
196 }
197
198- list_add_tail(&alg->cra_list, &crypto_alg_list);
199+ list_add(&alg->cra_list, &crypto_alg_list);
200 out:
201 up_write(&crypto_alg_sem);
202 return ret;
203Index: linux-2.6.16.50/crypto/deflate.c
204===================================================================
205--- linux-2.6.16.50.orig/crypto/deflate.c 2006-07-14 18:09:26.351436750 +1200
206+++ linux-2.6.16.50/crypto/deflate.c 2006-07-14 18:10:31.091482750 +1200
207@@ -73,12 +73,11 @@
208 int ret = 0;
209 struct z_stream_s *stream = &ctx->decomp_stream;
210
211- stream->workspace = kmalloc(zlib_inflate_workspacesize(), GFP_KERNEL);
212+ stream->workspace = kzalloc(zlib_inflate_workspacesize(), GFP_KERNEL);
213 if (!stream->workspace ) {
214 ret = -ENOMEM;
215 goto out;
216 }
217- memset(stream->workspace, 0, zlib_inflate_workspacesize());
218 ret = zlib_inflateInit2(stream, -DEFLATE_DEF_WINBITS);
219 if (ret != Z_OK) {
220 ret = -EINVAL;
221@@ -103,8 +102,9 @@
222 kfree(ctx->decomp_stream.workspace);
223 }
224
225-static int deflate_init(void *ctx)
226+static int deflate_init(struct crypto_tfm *tfm)
227 {
228+ struct deflate_ctx *ctx = crypto_tfm_ctx(tfm);
229 int ret;
230
231 ret = deflate_comp_init(ctx);
232@@ -117,17 +117,19 @@
233 return ret;
234 }
235
236-static void deflate_exit(void *ctx)
237+static void deflate_exit(struct crypto_tfm *tfm)
238 {
239+ struct deflate_ctx *ctx = crypto_tfm_ctx(tfm);
240+
241 deflate_comp_exit(ctx);
242 deflate_decomp_exit(ctx);
243 }
244
245-static int deflate_compress(void *ctx, const u8 *src, unsigned int slen,
246- u8 *dst, unsigned int *dlen)
247+static int deflate_compress(struct crypto_tfm *tfm, const u8 *src,
248+ unsigned int slen, u8 *dst, unsigned int *dlen)
249 {
250 int ret = 0;
251- struct deflate_ctx *dctx = ctx;
252+ struct deflate_ctx *dctx = crypto_tfm_ctx(tfm);
253 struct z_stream_s *stream = &dctx->comp_stream;
254
255 ret = zlib_deflateReset(stream);
256@@ -152,12 +154,12 @@
257 return ret;
258 }
259
260-static int deflate_decompress(void *ctx, const u8 *src, unsigned int slen,
261- u8 *dst, unsigned int *dlen)
262+static int deflate_decompress(struct crypto_tfm *tfm, const u8 *src,
263+ unsigned int slen, u8 *dst, unsigned int *dlen)
264 {
265
266 int ret = 0;
267- struct deflate_ctx *dctx = ctx;
268+ struct deflate_ctx *dctx = crypto_tfm_ctx(tfm);
269 struct z_stream_s *stream = &dctx->decomp_stream;
270
271 ret = zlib_inflateReset(stream);
272@@ -199,9 +201,9 @@
273 .cra_ctxsize = sizeof(struct deflate_ctx),
274 .cra_module = THIS_MODULE,
275 .cra_list = LIST_HEAD_INIT(alg.cra_list),
276+ .cra_init = deflate_init,
277+ .cra_exit = deflate_exit,
278 .cra_u = { .compress = {
279- .coa_init = deflate_init,
280- .coa_exit = deflate_exit,
281 .coa_compress = deflate_compress,
282 .coa_decompress = deflate_decompress } }
283 };
284Index: linux-2.6.16.50/crypto/des.c
285===================================================================
286--- linux-2.6.16.50.orig/crypto/des.c 2006-07-14 18:09:26.355437000 +1200
287+++ linux-2.6.16.50/crypto/des.c 2006-07-14 18:10:31.099483250 +1200
288@@ -783,9 +783,10 @@
289 }
290 }
291
292-static int des_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags)
293+static int des_setkey(struct crypto_tfm *tfm, const u8 *key,
294+ unsigned int keylen, u32 *flags)
295 {
296- struct des_ctx *dctx = ctx;
297+ struct des_ctx *dctx = crypto_tfm_ctx(tfm);
298 u32 tmp[DES_EXPKEY_WORDS];
299 int ret;
300
301@@ -803,9 +804,10 @@
302 return 0;
303 }
304
305-static void des_encrypt(void *ctx, u8 *dst, const u8 *src)
306+static void des_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
307 {
308- const u32 *K = ((struct des_ctx *)ctx)->expkey;
309+ struct des_ctx *ctx = crypto_tfm_ctx(tfm);
310+ const u32 *K = ctx->expkey;
311 const __le32 *s = (const __le32 *)src;
312 __le32 *d = (__le32 *)dst;
313 u32 L, R, A, B;
314@@ -825,9 +827,10 @@
315 d[1] = cpu_to_le32(L);
316 }
317
318-static void des_decrypt(void *ctx, u8 *dst, const u8 *src)
319+static void des_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
320 {
321- const u32 *K = ((struct des_ctx *)ctx)->expkey + DES_EXPKEY_WORDS - 2;
322+ struct des_ctx *ctx = crypto_tfm_ctx(tfm);
323+ const u32 *K = ctx->expkey + DES_EXPKEY_WORDS - 2;
324 const __le32 *s = (const __le32 *)src;
325 __le32 *d = (__le32 *)dst;
326 u32 L, R, A, B;
327@@ -860,11 +863,11 @@
328 * property.
329 *
330 */
331-static int des3_ede_setkey(void *ctx, const u8 *key,
332+static int des3_ede_setkey(struct crypto_tfm *tfm, const u8 *key,
333 unsigned int keylen, u32 *flags)
334 {
335 const u32 *K = (const u32 *)key;
336- struct des3_ede_ctx *dctx = ctx;
337+ struct des3_ede_ctx *dctx = crypto_tfm_ctx(tfm);
338 u32 *expkey = dctx->expkey;
339
340 if (unlikely(!((K[0] ^ K[2]) | (K[1] ^ K[3])) ||
341@@ -881,9 +884,9 @@
342 return 0;
343 }
344
345-static void des3_ede_encrypt(void *ctx, u8 *dst, const u8 *src)
346+static void des3_ede_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
347 {
348- struct des3_ede_ctx *dctx = ctx;
349+ struct des3_ede_ctx *dctx = crypto_tfm_ctx(tfm);
350 const u32 *K = dctx->expkey;
351 const __le32 *s = (const __le32 *)src;
352 __le32 *d = (__le32 *)dst;
353@@ -912,9 +915,9 @@
354 d[1] = cpu_to_le32(L);
355 }
356
357-static void des3_ede_decrypt(void *ctx, u8 *dst, const u8 *src)
358+static void des3_ede_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
359 {
360- struct des3_ede_ctx *dctx = ctx;
361+ struct des3_ede_ctx *dctx = crypto_tfm_ctx(tfm);
362 const u32 *K = dctx->expkey + DES3_EDE_EXPKEY_WORDS - 2;
363 const __le32 *s = (const __le32 *)src;
364 __le32 *d = (__le32 *)dst;
365@@ -965,6 +968,7 @@
366 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
367 .cra_ctxsize = sizeof(struct des3_ede_ctx),
368 .cra_module = THIS_MODULE,
369+ .cra_alignmask = 3,
370 .cra_list = LIST_HEAD_INIT(des3_ede_alg.cra_list),
371 .cra_u = { .cipher = {
372 .cia_min_keysize = DES3_EDE_KEY_SIZE,
373Index: linux-2.6.16.50/crypto/serpent.c
374===================================================================
375--- linux-2.6.16.50.orig/crypto/serpent.c 2006-07-14 18:09:26.355437000 +1200
376+++ linux-2.6.16.50/crypto/serpent.c 2006-07-14 18:10:31.103483500 +1200
377@@ -215,9 +215,11 @@
378 };
379
380
381-static int serpent_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags)
382+static int serpent_setkey(struct crypto_tfm *tfm, const u8 *key,
383+ unsigned int keylen, u32 *flags)
384 {
385- u32 *k = ((struct serpent_ctx *)ctx)->expkey;
386+ struct serpent_ctx *ctx = crypto_tfm_ctx(tfm);
387+ u32 *k = ctx->expkey;
388 u8 *k8 = (u8 *)k;
389 u32 r0,r1,r2,r3,r4;
390 int i;
391@@ -365,10 +367,11 @@
392 return 0;
393 }
394
395-static void serpent_encrypt(void *ctx, u8 *dst, const u8 *src)
396+static void serpent_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
397 {
398+ struct serpent_ctx *ctx = crypto_tfm_ctx(tfm);
399 const u32
400- *k = ((struct serpent_ctx *)ctx)->expkey,
401+ *k = ctx->expkey,
402 *s = (const u32 *)src;
403 u32 *d = (u32 *)dst,
404 r0, r1, r2, r3, r4;
405@@ -423,8 +426,9 @@
406 d[3] = cpu_to_le32(r3);
407 }
408
409-static void serpent_decrypt(void *ctx, u8 *dst, const u8 *src)
410+static void serpent_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
411 {
412+ struct serpent_ctx *ctx = crypto_tfm_ctx(tfm);
413 const u32
414 *k = ((struct serpent_ctx *)ctx)->expkey,
415 *s = (const u32 *)src;
416@@ -481,6 +485,7 @@
417 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
418 .cra_blocksize = SERPENT_BLOCK_SIZE,
419 .cra_ctxsize = sizeof(struct serpent_ctx),
420+ .cra_alignmask = 3,
421 .cra_module = THIS_MODULE,
422 .cra_list = LIST_HEAD_INIT(serpent_alg.cra_list),
423 .cra_u = { .cipher = {
424@@ -491,7 +496,8 @@
425 .cia_decrypt = serpent_decrypt } }
426 };
427
428-static int tnepres_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags)
429+static int tnepres_setkey(struct crypto_tfm *tfm, const u8 *key,
430+ unsigned int keylen, u32 *flags)
431 {
432 u8 rev_key[SERPENT_MAX_KEY_SIZE];
433 int i;
434@@ -505,10 +511,10 @@
435 for (i = 0; i < keylen; ++i)
436 rev_key[keylen - i - 1] = key[i];
437
438- return serpent_setkey(ctx, rev_key, keylen, flags);
439+ return serpent_setkey(tfm, rev_key, keylen, flags);
440 }
441
442-static void tnepres_encrypt(void *ctx, u8 *dst, const u8 *src)
443+static void tnepres_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
444 {
445 const u32 * const s = (const u32 * const)src;
446 u32 * const d = (u32 * const)dst;
447@@ -520,7 +526,7 @@
448 rs[2] = swab32(s[1]);
449 rs[3] = swab32(s[0]);
450
451- serpent_encrypt(ctx, (u8 *)rd, (u8 *)rs);
452+ serpent_encrypt(tfm, (u8 *)rd, (u8 *)rs);
453
454 d[0] = swab32(rd[3]);
455 d[1] = swab32(rd[2]);
456@@ -528,7 +534,7 @@
457 d[3] = swab32(rd[0]);
458 }
459
460-static void tnepres_decrypt(void *ctx, u8 *dst, const u8 *src)
461+static void tnepres_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
462 {
463 const u32 * const s = (const u32 * const)src;
464 u32 * const d = (u32 * const)dst;
465@@ -540,7 +546,7 @@
466 rs[2] = swab32(s[1]);
467 rs[3] = swab32(s[0]);
468
469- serpent_decrypt(ctx, (u8 *)rd, (u8 *)rs);
470+ serpent_decrypt(tfm, (u8 *)rd, (u8 *)rs);
471
472 d[0] = swab32(rd[3]);
473 d[1] = swab32(rd[2]);
474Index: linux-2.6.16.50/crypto/tcrypt.h
475===================================================================
476--- linux-2.6.16.50.orig/crypto/tcrypt.h 2006-07-14 18:09:26.355437000 +1200
477+++ linux-2.6.16.50/crypto/tcrypt.h 2006-07-14 18:10:31.111484000 +1200
478@@ -26,37 +26,38 @@
479 #define MAX_IVLEN 32
480
481 struct hash_testvec {
482+ /* only used with keyed hash algorithms */
483+ char key[128] __attribute__ ((__aligned__(4)));
484 char plaintext[128];
485- unsigned char psize;
486 char digest[MAX_DIGEST_SIZE];
487- unsigned char np;
488 unsigned char tap[MAX_TAP];
489- char key[128]; /* only used with keyed hash algorithms */
490+ unsigned char psize;
491+ unsigned char np;
492 unsigned char ksize;
493 };
494
495 struct hmac_testvec {
496 char key[128];
497- unsigned char ksize;
498 char plaintext[128];
499- unsigned char psize;
500 char digest[MAX_DIGEST_SIZE];
501- unsigned char np;
502 unsigned char tap[MAX_TAP];
503+ unsigned char ksize;
504+ unsigned char psize;
505+ unsigned char np;
506 };
507
508 struct cipher_testvec {
509+ char key[MAX_KEYLEN] __attribute__ ((__aligned__(4)));
510+ char iv[MAX_IVLEN];
511+ char input[48];
512+ char result[48];
513+ unsigned char tap[MAX_TAP];
514+ int np;
515 unsigned char fail;
516 unsigned char wk; /* weak key flag */
517- char key[MAX_KEYLEN];
518 unsigned char klen;
519- char iv[MAX_IVLEN];
520- char input[48];
521 unsigned char ilen;
522- char result[48];
523 unsigned char rlen;
524- int np;
525- unsigned char tap[MAX_TAP];
526 };
527
528 struct cipher_speed {
529@@ -64,6 +65,11 @@
530 unsigned int blen;
531 };
532
533+struct digest_speed {
534+ unsigned int blen; /* buffer length */
535+ unsigned int plen; /* per-update length */
536+};
537+
538 /*
539 * MD4 test vectors from RFC1320
540 */
541@@ -2974,4 +2980,35 @@
542 { .klen = 0, .blen = 0, }
543 };
544
545+/*
546+ * Digest speed tests
547+ */
548+static struct digest_speed generic_digest_speed_template[] = {
549+ { .blen = 16, .plen = 16, },
550+ { .blen = 64, .plen = 16, },
551+ { .blen = 64, .plen = 64, },
552+ { .blen = 256, .plen = 16, },
553+ { .blen = 256, .plen = 64, },
554+ { .blen = 256, .plen = 256, },
555+ { .blen = 1024, .plen = 16, },
556+ { .blen = 1024, .plen = 256, },
557+ { .blen = 1024, .plen = 1024, },
558+ { .blen = 2048, .plen = 16, },
559+ { .blen = 2048, .plen = 256, },
560+ { .blen = 2048, .plen = 1024, },
561+ { .blen = 2048, .plen = 2048, },
562+ { .blen = 4096, .plen = 16, },
563+ { .blen = 4096, .plen = 256, },
564+ { .blen = 4096, .plen = 1024, },
565+ { .blen = 4096, .plen = 4096, },
566+ { .blen = 8192, .plen = 16, },
567+ { .blen = 8192, .plen = 256, },
568+ { .blen = 8192, .plen = 1024, },
569+ { .blen = 8192, .plen = 4096, },
570+ { .blen = 8192, .plen = 8192, },
571+
572+ /* End marker */
573+ { .blen = 0, .plen = 0, }
574+};
575+
576 #endif /* _CRYPTO_TCRYPT_H */
577Index: linux-2.6.16.50/crypto/twofish.c
578===================================================================
579--- linux-2.6.16.50.orig/crypto/twofish.c 2006-07-14 18:09:26.359437250 +1200
580+++ linux-2.6.16.50/crypto/twofish.c 2006-07-14 18:10:31.119484500 +1200
581@@ -44,6 +44,7 @@
582 #include <linux/types.h>
583 #include <linux/errno.h>
584 #include <linux/crypto.h>
585+#include <linux/bitops.h>
586
587
588 /* The large precomputed tables for the Twofish cipher (twofish.c)
589@@ -542,9 +543,9 @@
590 #define CALC_K(a, j, k, l, m, n) \
591 x = CALC_K_2 (k, l, k, l, 0); \
592 y = CALC_K_2 (m, n, m, n, 4); \
593- y = (y << 8) + (y >> 24); \
594+ y = rol32(y, 8); \
595 x += y; y += x; ctx->a[j] = x; \
596- ctx->a[(j) + 1] = (y << 9) + (y >> 23)
597+ ctx->a[(j) + 1] = rol32(y, 9)
598
599 #define CALC_K192_2(a, b, c, d, j) \
600 CALC_K_2 (q0[a ^ key[(j) + 16]], \
601@@ -555,9 +556,9 @@
602 #define CALC_K192(a, j, k, l, m, n) \
603 x = CALC_K192_2 (l, l, k, k, 0); \
604 y = CALC_K192_2 (n, n, m, m, 4); \
605- y = (y << 8) + (y >> 24); \
606+ y = rol32(y, 8); \
607 x += y; y += x; ctx->a[j] = x; \
608- ctx->a[(j) + 1] = (y << 9) + (y >> 23)
609+ ctx->a[(j) + 1] = rol32(y, 9)
610
611 #define CALC_K256_2(a, b, j) \
612 CALC_K192_2 (q1[b ^ key[(j) + 24]], \
613@@ -568,9 +569,9 @@
614 #define CALC_K256(a, j, k, l, m, n) \
615 x = CALC_K256_2 (k, l, 0); \
616 y = CALC_K256_2 (m, n, 4); \
617- y = (y << 8) + (y >> 24); \
618+ y = rol32(y, 8); \
619 x += y; y += x; ctx->a[j] = x; \
620- ctx->a[(j) + 1] = (y << 9) + (y >> 23)
621+ ctx->a[(j) + 1] = rol32(y, 9)
622
623
624 /* Macros to compute the g() function in the encryption and decryption
625@@ -594,15 +595,15 @@
626 x = G1 (a); y = G2 (b); \
627 x += y; y += x + ctx->k[2 * (n) + 1]; \
628 (c) ^= x + ctx->k[2 * (n)]; \
629- (c) = ((c) >> 1) + ((c) << 31); \
630- (d) = (((d) << 1)+((d) >> 31)) ^ y
631+ (c) = ror32((c), 1); \
632+ (d) = rol32((d), 1) ^ y
633
634 #define DECROUND(n, a, b, c, d) \
635 x = G1 (a); y = G2 (b); \
636 x += y; y += x; \
637 (d) ^= y + ctx->k[2 * (n) + 1]; \
638- (d) = ((d) >> 1) + ((d) << 31); \
639- (c) = (((c) << 1)+((c) >> 31)); \
640+ (d) = ror32((d), 1); \
641+ (c) = rol32((c), 1); \
642 (c) ^= (x + ctx->k[2 * (n)])
643
644 /* Encryption and decryption cycles; each one is simply two Feistel rounds
645@@ -642,11 +643,11 @@
646 };
647
648 /* Perform the key setup. */
649-static int twofish_setkey(void *cx, const u8 *key,
650- unsigned int key_len, u32 *flags)
651+static int twofish_setkey(struct crypto_tfm *tfm, const u8 *key,
652+ unsigned int key_len, u32 *flags)
653 {
654
655- struct twofish_ctx *ctx = cx;
656+ struct twofish_ctx *ctx = crypto_tfm_ctx(tfm);
657
658 int i, j, k;
659
660@@ -801,9 +802,9 @@
661 }
662
663 /* Encrypt one block. in and out may be the same. */
664-static void twofish_encrypt(void *cx, u8 *out, const u8 *in)
665+static void twofish_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
666 {
667- struct twofish_ctx *ctx = cx;
668+ struct twofish_ctx *ctx = crypto_tfm_ctx(tfm);
669 const __le32 *src = (const __le32 *)in;
670 __le32 *dst = (__le32 *)out;
671
672@@ -838,9 +839,9 @@
673 }
674
675 /* Decrypt one block. in and out may be the same. */
676-static void twofish_decrypt(void *cx, u8 *out, const u8 *in)
677+static void twofish_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
678 {
679- struct twofish_ctx *ctx = cx;
680+ struct twofish_ctx *ctx = crypto_tfm_ctx(tfm);
681 const __le32 *src = (const __le32 *)in;
682 __le32 *dst = (__le32 *)out;
683
684Index: linux-2.6.16.50/drivers/crypto/padlock-aes.c
685===================================================================
686--- linux-2.6.16.50.orig/drivers/crypto/padlock-aes.c 2006-07-14 18:09:26.387439000 +1200
687+++ linux-2.6.16.50/drivers/crypto/padlock-aes.c 2006-07-18 01:35:50.305291201 +1200
688@@ -59,16 +59,20 @@
689 #define AES_EXTENDED_KEY_SIZE 64 /* in uint32_t units */
690 #define AES_EXTENDED_KEY_SIZE_B (AES_EXTENDED_KEY_SIZE * sizeof(uint32_t))
691
692+/* Whenever making any changes to the following
693+ * structure *make sure* you keep E, d_data
694+ * and cword aligned on 16 Bytes boundaries!!! */
695 struct aes_ctx {
696- uint32_t e_data[AES_EXTENDED_KEY_SIZE];
697- uint32_t d_data[AES_EXTENDED_KEY_SIZE];
698 struct {
699 struct cword encrypt;
700 struct cword decrypt;
701 } cword;
702- uint32_t *E;
703- uint32_t *D;
704+ u32 *D;
705 int key_length;
706+ u32 E[AES_EXTENDED_KEY_SIZE]
707+ __attribute__ ((__aligned__(PADLOCK_ALIGNMENT)));
708+ u32 d_data[AES_EXTENDED_KEY_SIZE]
709+ __attribute__ ((__aligned__(PADLOCK_ALIGNMENT)));
710 };
711
712 /* ====== Key management routines ====== */
713@@ -282,15 +286,20 @@
714 return 0;
715 }
716
717-static inline struct aes_ctx *aes_ctx(void *ctx)
718+static inline struct aes_ctx *aes_ctx(struct crypto_tfm *tfm)
719 {
720- return (struct aes_ctx *)ALIGN((unsigned long)ctx, PADLOCK_ALIGNMENT);
721+ unsigned long addr = (unsigned long)crypto_tfm_ctx(tfm);
722+ unsigned long align = PADLOCK_ALIGNMENT;
723+
724+ if (align <= crypto_tfm_ctx_alignment())
725+ align = 1;
726+ return (struct aes_ctx *)ALIGN(addr, align);
727 }
728
729-static int
730-aes_set_key(void *ctx_arg, const uint8_t *in_key, unsigned int key_len, uint32_t *flags)
731+static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
732+ unsigned int key_len, u32 *flags)
733 {
734- struct aes_ctx *ctx = aes_ctx(ctx_arg);
735+ struct aes_ctx *ctx = aes_ctx(tfm);
736 const __le32 *key = (const __le32 *)in_key;
737 uint32_t i, t, u, v, w;
738 uint32_t P[AES_EXTENDED_KEY_SIZE];
739@@ -308,8 +317,7 @@
740 * itself we must supply the plain key for both encryption
741 * and decryption.
742 */
743- ctx->E = ctx->e_data;
744- ctx->D = ctx->e_data;
745+ ctx->D = ctx->E;
746
747 E_KEY[0] = le32_to_cpu(key[0]);
748 E_KEY[1] = le32_to_cpu(key[1]);
749@@ -410,24 +418,22 @@
750 return iv;
751 }
752
753-static void
754-aes_encrypt(void *ctx_arg, uint8_t *out, const uint8_t *in)
755+static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
756 {
757- struct aes_ctx *ctx = aes_ctx(ctx_arg);
758+ struct aes_ctx *ctx = aes_ctx(tfm);
759 padlock_xcrypt_ecb(in, out, ctx->E, &ctx->cword.encrypt, 1);
760 }
761
762-static void
763-aes_decrypt(void *ctx_arg, uint8_t *out, const uint8_t *in)
764+static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
765 {
766- struct aes_ctx *ctx = aes_ctx(ctx_arg);
767+ struct aes_ctx *ctx = aes_ctx(tfm);
768 padlock_xcrypt_ecb(in, out, ctx->D, &ctx->cword.decrypt, 1);
769 }
770
771 static unsigned int aes_encrypt_ecb(const struct cipher_desc *desc, u8 *out,
772 const u8 *in, unsigned int nbytes)
773 {
774- struct aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(desc->tfm));
775+ struct aes_ctx *ctx = aes_ctx(desc->tfm);
776 padlock_xcrypt_ecb(in, out, ctx->E, &ctx->cword.encrypt,
777 nbytes / AES_BLOCK_SIZE);
778 return nbytes & ~(AES_BLOCK_SIZE - 1);
779@@ -436,7 +442,7 @@
780 static unsigned int aes_decrypt_ecb(const struct cipher_desc *desc, u8 *out,
781 const u8 *in, unsigned int nbytes)
782 {
783- struct aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(desc->tfm));
784+ struct aes_ctx *ctx = aes_ctx(desc->tfm);
785 padlock_xcrypt_ecb(in, out, ctx->D, &ctx->cword.decrypt,
786 nbytes / AES_BLOCK_SIZE);
787 return nbytes & ~(AES_BLOCK_SIZE - 1);
788@@ -445,7 +451,7 @@
789 static unsigned int aes_encrypt_cbc(const struct cipher_desc *desc, u8 *out,
790 const u8 *in, unsigned int nbytes)
791 {
792- struct aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(desc->tfm));
793+ struct aes_ctx *ctx = aes_ctx(desc->tfm);
794 u8 *iv;
795
796 iv = padlock_xcrypt_cbc(in, out, ctx->E, desc->info,
797@@ -458,7 +464,7 @@
798 static unsigned int aes_decrypt_cbc(const struct cipher_desc *desc, u8 *out,
799 const u8 *in, unsigned int nbytes)
800 {
801- struct aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(desc->tfm));
802+ struct aes_ctx *ctx = aes_ctx(desc->tfm);
803 padlock_xcrypt_cbc(in, out, ctx->D, desc->info, &ctx->cword.decrypt,
804 nbytes / AES_BLOCK_SIZE);
805 return nbytes & ~(AES_BLOCK_SIZE - 1);
806Index: linux-2.6.16.50/include/linux/crypto.h
807===================================================================
808--- linux-2.6.16.50.orig/include/linux/crypto.h 2006-07-14 18:09:26.387439000 +1200
809+++ linux-2.6.16.50/include/linux/crypto.h 2006-07-18 01:35:17.475239451 +1200
810@@ -67,7 +67,7 @@
811
812 struct cipher_desc {
813 struct crypto_tfm *tfm;
814- void (*crfn)(void *ctx, u8 *dst, const u8 *src);
815+ void (*crfn)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
816 unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst,
817 const u8 *src, unsigned int nbytes);
818 void *info;
819@@ -80,10 +80,10 @@
820 struct cipher_alg {
821 unsigned int cia_min_keysize;
822 unsigned int cia_max_keysize;
823- int (*cia_setkey)(void *ctx, const u8 *key,
824+ int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
825 unsigned int keylen, u32 *flags);
826- void (*cia_encrypt)(void *ctx, u8 *dst, const u8 *src);
827- void (*cia_decrypt)(void *ctx, u8 *dst, const u8 *src);
828+ void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
829+ void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
830
831 unsigned int (*cia_encrypt_ecb)(const struct cipher_desc *desc,
832 u8 *dst, const u8 *src,
833@@ -101,20 +101,19 @@
834
835 struct digest_alg {
836 unsigned int dia_digestsize;
837- void (*dia_init)(void *ctx);
838- void (*dia_update)(void *ctx, const u8 *data, unsigned int len);
839- void (*dia_final)(void *ctx, u8 *out);
840- int (*dia_setkey)(void *ctx, const u8 *key,
841+ void (*dia_init)(struct crypto_tfm *tfm);
842+ void (*dia_update)(struct crypto_tfm *tfm, const u8 *data,
843+ unsigned int len);
844+ void (*dia_final)(struct crypto_tfm *tfm, u8 *out);
845+ int (*dia_setkey)(struct crypto_tfm *tfm, const u8 *key,
846 unsigned int keylen, u32 *flags);
847 };
848
849 struct compress_alg {
850- int (*coa_init)(void *ctx);
851- void (*coa_exit)(void *ctx);
852- int (*coa_compress)(void *ctx, const u8 *src, unsigned int slen,
853- u8 *dst, unsigned int *dlen);
854- int (*coa_decompress)(void *ctx, const u8 *src, unsigned int slen,
855- u8 *dst, unsigned int *dlen);
856+ int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src,
857+ unsigned int slen, u8 *dst, unsigned int *dlen);
858+ int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src,
859+ unsigned int slen, u8 *dst, unsigned int *dlen);
860 };
861
862 #define cra_cipher cra_u.cipher
863@@ -130,14 +129,17 @@
864
865 int cra_priority;
866
867- const char cra_name[CRYPTO_MAX_ALG_NAME];
868- const char cra_driver_name[CRYPTO_MAX_ALG_NAME];
869+ char cra_name[CRYPTO_MAX_ALG_NAME];
870+ char cra_driver_name[CRYPTO_MAX_ALG_NAME];
871
872 union {
873 struct cipher_alg cipher;
874 struct digest_alg digest;
875 struct compress_alg compress;
876 } cra_u;
877+
878+ int (*cra_init)(struct crypto_tfm *tfm);
879+ void (*cra_exit)(struct crypto_tfm *tfm);
880
881 struct module *cra_module;
882 };
883@@ -229,6 +231,8 @@
884 } crt_u;
885
886 struct crypto_alg *__crt_alg;
887+
888+ char __crt_ctx[] __attribute__ ((__aligned__));
889 };
890
891 /*
892@@ -301,7 +305,13 @@
893
894 static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
895 {
896- return (void *)&tfm[1];
897+ return tfm->__crt_ctx;
898+}
899+
900+static inline unsigned int crypto_tfm_ctx_alignment(void)
901+{
902+ struct crypto_tfm *tfm;
903+ return __alignof__(tfm->__crt_ctx);
904 }
905
906 /*
907Index: linux-2.6.16.50/arch/i386/crypto/aes-i586-asm.S
908===================================================================
909--- linux-2.6.16.50.orig/arch/i386/crypto/aes-i586-asm.S 2006-07-14 18:09:26.339436000 +1200
910+++ linux-2.6.16.50/arch/i386/crypto/aes-i586-asm.S 2006-07-14 18:10:31.131485250 +1200
911@@ -36,22 +36,19 @@
912 .file "aes-i586-asm.S"
913 .text
914
915-// aes_rval aes_enc_blk(const unsigned char in_blk[], unsigned char out_blk[], const aes_ctx cx[1])//
916-// aes_rval aes_dec_blk(const unsigned char in_blk[], unsigned char out_blk[], const aes_ctx cx[1])//
917-
918-#define tlen 1024 // length of each of 4 'xor' arrays (256 32-bit words)
919-
920-// offsets to parameters with one register pushed onto stack
921+#include <asm/asm-offsets.h>
922
923-#define in_blk 8 // input byte array address parameter
924-#define out_blk 12 // output byte array address parameter
925-#define ctx 16 // AES context structure
926-
927-// offsets in context structure
928+#define tlen 1024 // length of each of 4 'xor' arrays (256 32-bit words)
929
930-#define ekey 0 // encryption key schedule base address
931-#define nrnd 256 // number of rounds
932-#define dkey 260 // decryption key schedule base address
933+/* offsets to parameters with one register pushed onto stack */
934+#define tfm 8
935+#define out_blk 12
936+#define in_blk 16
937+
938+/* offsets in crypto_tfm structure */
939+#define ekey (crypto_tfm_ctx_offset + 0)
940+#define nrnd (crypto_tfm_ctx_offset + 256)
941+#define dkey (crypto_tfm_ctx_offset + 260)
942
943 // register mapping for encrypt and decrypt subroutines
944
945@@ -220,6 +217,7 @@
946 do_col (table, r5,r0,r1,r4, r2,r3); /* idx=r5 */
947
948 // AES (Rijndael) Encryption Subroutine
949+/* void aes_enc_blk(struct crypto_tfm *tfm, u8 *out_blk, const u8 *in_blk) */
950
951 .global aes_enc_blk
952
953@@ -230,7 +228,7 @@
954
955 aes_enc_blk:
956 push %ebp
957- mov ctx(%esp),%ebp // pointer to context
958+ mov tfm(%esp),%ebp
959
960 // CAUTION: the order and the values used in these assigns
961 // rely on the register mappings
962@@ -295,6 +293,7 @@
963 ret
964
965 // AES (Rijndael) Decryption Subroutine
966+/* void aes_dec_blk(struct crypto_tfm *tfm, u8 *out_blk, const u8 *in_blk) */
967
968 .global aes_dec_blk
969
970@@ -305,7 +304,7 @@
971
972 aes_dec_blk:
973 push %ebp
974- mov ctx(%esp),%ebp // pointer to context
975+ mov tfm(%esp),%ebp
976
977 // CAUTION: the order and the values used in these assigns
978 // rely on the register mappings
979Index: linux-2.6.16.50/arch/i386/crypto/aes.c
980===================================================================
981--- linux-2.6.16.50.orig/arch/i386/crypto/aes.c 2006-07-14 18:09:26.343436250 +1200
982+++ linux-2.6.16.50/arch/i386/crypto/aes.c 2006-07-14 18:10:31.135485500 +1200
983@@ -45,8 +45,8 @@
984 #include <linux/crypto.h>
985 #include <linux/linkage.h>
986
987-asmlinkage void aes_enc_blk(const u8 *src, u8 *dst, void *ctx);
988-asmlinkage void aes_dec_blk(const u8 *src, u8 *dst, void *ctx);
989+asmlinkage void aes_enc_blk(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
990+asmlinkage void aes_dec_blk(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
991
992 #define AES_MIN_KEY_SIZE 16
993 #define AES_MAX_KEY_SIZE 32
994@@ -378,12 +378,12 @@
995 k[8*(i)+11] = ss[3]; \
996 }
997
998-static int
999-aes_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len, u32 *flags)
1000+static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
1001+ unsigned int key_len, u32 *flags)
1002 {
1003 int i;
1004 u32 ss[8];
1005- struct aes_ctx *ctx = ctx_arg;
1006+ struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
1007 const __le32 *key = (const __le32 *)in_key;
1008
1009 /* encryption schedule */
1010@@ -464,16 +464,16 @@
1011 return 0;
1012 }
1013
1014-static inline void aes_encrypt(void *ctx, u8 *dst, const u8 *src)
1015+static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
1016 {
1017- aes_enc_blk(src, dst, ctx);
1018+ aes_enc_blk(tfm, dst, src);
1019 }
1020-static inline void aes_decrypt(void *ctx, u8 *dst, const u8 *src)
1021+
1022+static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
1023 {
1024- aes_dec_blk(src, dst, ctx);
1025+ aes_dec_blk(tfm, dst, src);
1026 }
1027
1028-
1029 static struct crypto_alg aes_alg = {
1030 .cra_name = "aes",
1031 .cra_driver_name = "aes-i586",
1032Index: linux-2.6.16.50/arch/i386/kernel/asm-offsets.c
1033===================================================================
1034--- linux-2.6.16.50.orig/arch/i386/kernel/asm-offsets.c 2006-07-14 18:09:26.343436250 +1200
1035+++ linux-2.6.16.50/arch/i386/kernel/asm-offsets.c 2006-07-14 18:10:31.139485750 +1200
1036@@ -8,6 +8,7 @@
1037 #include <linux/signal.h>
1038 #include <linux/personality.h>
1039 #include <linux/suspend.h>
1040+#include <linux/crypto.h>
1041 #include <asm/ucontext.h>
1042 #include "sigframe.h"
1043 #include <asm/fixmap.h>
1044@@ -69,4 +70,6 @@
1045
1046 DEFINE(PAGE_SIZE_asm, PAGE_SIZE);
1047 DEFINE(VSYSCALL_BASE, __fix_to_virt(FIX_VSYSCALL));
1048+
1049+ OFFSET(crypto_tfm_ctx_offset, crypto_tfm, __crt_ctx);
1050 }
1051Index: linux-2.6.16.50/arch/s390/crypto/aes_s390.c
1052===================================================================
1053--- linux-2.6.16.50.orig/arch/s390/crypto/aes_s390.c 2006-07-14 18:09:26.343436250 +1200
1054+++ linux-2.6.16.50/arch/s390/crypto/aes_s390.c 2006-07-14 18:10:31.139485750 +1200
1055@@ -37,10 +37,10 @@
1056 int key_len;
1057 };
1058
1059-static int aes_set_key(void *ctx, const u8 *in_key, unsigned int key_len,
1060- u32 *flags)
1061+static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
1062+ unsigned int key_len, u32 *flags)
1063 {
1064- struct s390_aes_ctx *sctx = ctx;
1065+ struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
1066
1067 switch (key_len) {
1068 case 16:
1069@@ -70,9 +70,9 @@
1070 return -EINVAL;
1071 }
1072
1073-static void aes_encrypt(void *ctx, u8 *out, const u8 *in)
1074+static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
1075 {
1076- const struct s390_aes_ctx *sctx = ctx;
1077+ const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
1078
1079 switch (sctx->key_len) {
1080 case 16:
1081@@ -90,9 +90,9 @@
1082 }
1083 }
1084
1085-static void aes_decrypt(void *ctx, u8 *out, const u8 *in)
1086+static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
1087 {
1088- const struct s390_aes_ctx *sctx = ctx;
1089+ const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
1090
1091 switch (sctx->key_len) {
1092 case 16:
1093Index: linux-2.6.16.50/arch/s390/crypto/des_s390.c
1094===================================================================
1095--- linux-2.6.16.50.orig/arch/s390/crypto/des_s390.c 2006-07-14 18:09:26.347436500 +1200
1096+++ linux-2.6.16.50/arch/s390/crypto/des_s390.c 2006-07-14 18:10:31.147486250 +1200
1097@@ -44,10 +44,10 @@
1098 u8 key[DES3_192_KEY_SIZE];
1099 };
1100
1101-static int des_setkey(void *ctx, const u8 *key, unsigned int keylen,
1102- u32 *flags)
1103+static int des_setkey(struct crypto_tfm *tfm, const u8 *key,
1104+ unsigned int keylen, u32 *flags)
1105 {
1106- struct crypt_s390_des_ctx *dctx = ctx;
1107+ struct crypt_s390_des_ctx *dctx = crypto_tfm_ctx(tfm);
1108 int ret;
1109
1110 /* test if key is valid (not a weak key) */
1111@@ -57,16 +57,16 @@
1112 return ret;
1113 }
1114
1115-static void des_encrypt(void *ctx, u8 *out, const u8 *in)
1116+static void des_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
1117 {
1118- struct crypt_s390_des_ctx *dctx = ctx;
1119+ struct crypt_s390_des_ctx *dctx = crypto_tfm_ctx(tfm);
1120
1121 crypt_s390_km(KM_DEA_ENCRYPT, dctx->key, out, in, DES_BLOCK_SIZE);
1122 }
1123
1124-static void des_decrypt(void *ctx, u8 *out, const u8 *in)
1125+static void des_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
1126 {
1127- struct crypt_s390_des_ctx *dctx = ctx;
1128+ struct crypt_s390_des_ctx *dctx = crypto_tfm_ctx(tfm);
1129
1130 crypt_s390_km(KM_DEA_DECRYPT, dctx->key, out, in, DES_BLOCK_SIZE);
1131 }
1132@@ -166,11 +166,11 @@
1133 * Implementers MUST reject keys that exhibit this property.
1134 *
1135 */
1136-static int des3_128_setkey(void *ctx, const u8 *key, unsigned int keylen,
1137- u32 *flags)
1138+static int des3_128_setkey(struct crypto_tfm *tfm, const u8 *key,
1139+ unsigned int keylen, u32 *flags)
1140 {
1141 int i, ret;
1142- struct crypt_s390_des3_128_ctx *dctx = ctx;
1143+ struct crypt_s390_des3_128_ctx *dctx = crypto_tfm_ctx(tfm);
1144 const u8* temp_key = key;
1145
1146 if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE))) {
1147@@ -186,17 +186,17 @@
1148 return 0;
1149 }
1150
1151-static void des3_128_encrypt(void *ctx, u8 *dst, const u8 *src)
1152+static void des3_128_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
1153 {
1154- struct crypt_s390_des3_128_ctx *dctx = ctx;
1155+ struct crypt_s390_des3_128_ctx *dctx = crypto_tfm_ctx(tfm);
1156
1157 crypt_s390_km(KM_TDEA_128_ENCRYPT, dctx->key, dst, (void*)src,
1158 DES3_128_BLOCK_SIZE);
1159 }
1160
1161-static void des3_128_decrypt(void *ctx, u8 *dst, const u8 *src)
1162+static void des3_128_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
1163 {
1164- struct crypt_s390_des3_128_ctx *dctx = ctx;
1165+ struct crypt_s390_des3_128_ctx *dctx = crypto_tfm_ctx(tfm);
1166
1167 crypt_s390_km(KM_TDEA_128_DECRYPT, dctx->key, dst, (void*)src,
1168 DES3_128_BLOCK_SIZE);
1169@@ -302,11 +302,11 @@
1170 * property.
1171 *
1172 */
1173-static int des3_192_setkey(void *ctx, const u8 *key, unsigned int keylen,
1174- u32 *flags)
1175+static int des3_192_setkey(struct crypto_tfm *tfm, const u8 *key,
1176+ unsigned int keylen, u32 *flags)
1177 {
1178 int i, ret;
1179- struct crypt_s390_des3_192_ctx *dctx = ctx;
1180+ struct crypt_s390_des3_192_ctx *dctx = crypto_tfm_ctx(tfm);
1181 const u8* temp_key = key;
1182
1183 if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE) &&
1184@@ -325,17 +325,17 @@
1185 return 0;
1186 }
1187
1188-static void des3_192_encrypt(void *ctx, u8 *dst, const u8 *src)
1189+static void des3_192_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
1190 {
1191- struct crypt_s390_des3_192_ctx *dctx = ctx;
1192+ struct crypt_s390_des3_192_ctx *dctx = crypto_tfm_ctx(tfm);
1193
1194 crypt_s390_km(KM_TDEA_192_ENCRYPT, dctx->key, dst, (void*)src,
1195 DES3_192_BLOCK_SIZE);
1196 }
1197
1198-static void des3_192_decrypt(void *ctx, u8 *dst, const u8 *src)
1199+static void des3_192_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
1200 {
1201- struct crypt_s390_des3_192_ctx *dctx = ctx;
1202+ struct crypt_s390_des3_192_ctx *dctx = crypto_tfm_ctx(tfm);
1203
1204 crypt_s390_km(KM_TDEA_192_DECRYPT, dctx->key, dst, (void*)src,
1205 DES3_192_BLOCK_SIZE);
1206Index: linux-2.6.16.50/arch/s390/crypto/sha1_s390.c
1207===================================================================
1208--- linux-2.6.16.50.orig/arch/s390/crypto/sha1_s390.c 2006-07-14 18:09:26.347436500 +1200
1209+++ linux-2.6.16.50/arch/s390/crypto/sha1_s390.c 2006-07-14 18:10:31.147486250 +1200
1210@@ -40,28 +40,29 @@
1211 u8 buffer[2 * SHA1_BLOCK_SIZE];
1212 };
1213
1214-static void
1215-sha1_init(void *ctx)
1216+static void sha1_init(struct crypto_tfm *tfm)
1217 {
1218- static const struct crypt_s390_sha1_ctx initstate = {
1219- .state = {
1220- 0x67452301,
1221- 0xEFCDAB89,
1222- 0x98BADCFE,
1223- 0x10325476,
1224- 0xC3D2E1F0
1225- },
1226+ struct crypt_s390_sha1_ctx *ctx = crypto_tfm_ctx(tfm);
1227+ static const u32 initstate[5] = {
1228+ 0x67452301,
1229+ 0xEFCDAB89,
1230+ 0x98BADCFE,
1231+ 0x10325476,
1232+ 0xC3D2E1F0
1233 };
1234- memcpy(ctx, &initstate, sizeof(initstate));
1235+
1236+ ctx->count = 0;
1237+ memcpy(ctx->state, &initstate, sizeof(initstate));
1238+ ctx->buf_len = 0;
1239 }
1240
1241-static void
1242-sha1_update(void *ctx, const u8 *data, unsigned int len)
1243+static void sha1_update(struct crypto_tfm *tfm, const u8 *data,
1244+ unsigned int len)
1245 {
1246 struct crypt_s390_sha1_ctx *sctx;
1247 long imd_len;
1248
1249- sctx = ctx;
1250+ sctx = crypto_tfm_ctx(tfm);
1251 sctx->count += len * 8; //message bit length
1252
1253 //anything in buffer yet? -> must be completed
1254@@ -110,10 +111,9 @@
1255 }
1256
1257 /* Add padding and return the message digest. */
1258-static void
1259-sha1_final(void* ctx, u8 *out)
1260+static void sha1_final(struct crypto_tfm *tfm, u8 *out)
1261 {
1262- struct crypt_s390_sha1_ctx *sctx = ctx;
1263+ struct crypt_s390_sha1_ctx *sctx = crypto_tfm_ctx(tfm);
1264
1265 //must perform manual padding
1266 pad_message(sctx);
1267Index: linux-2.6.16.50/arch/s390/crypto/sha256_s390.c
1268===================================================================
1269--- linux-2.6.16.50.orig/arch/s390/crypto/sha256_s390.c 2006-07-14 18:09:26.347436500 +1200
1270+++ linux-2.6.16.50/arch/s390/crypto/sha256_s390.c 2006-07-14 18:10:31.151486500 +1200
1271@@ -31,9 +31,9 @@
1272 u8 buf[2 * SHA256_BLOCK_SIZE];
1273 };
1274
1275-static void sha256_init(void *ctx)
1276+static void sha256_init(struct crypto_tfm *tfm)
1277 {
1278- struct s390_sha256_ctx *sctx = ctx;
1279+ struct s390_sha256_ctx *sctx = crypto_tfm_ctx(tfm);
1280
1281 sctx->state[0] = 0x6a09e667;
1282 sctx->state[1] = 0xbb67ae85;
1283@@ -44,12 +44,12 @@
1284 sctx->state[6] = 0x1f83d9ab;
1285 sctx->state[7] = 0x5be0cd19;
1286 sctx->count = 0;
1287- memset(sctx->buf, 0, sizeof(sctx->buf));
1288 }
1289
1290-static void sha256_update(void *ctx, const u8 *data, unsigned int len)
1291+static void sha256_update(struct crypto_tfm *tfm, const u8 *data,
1292+ unsigned int len)
1293 {
1294- struct s390_sha256_ctx *sctx = ctx;
1295+ struct s390_sha256_ctx *sctx = crypto_tfm_ctx(tfm);
1296 unsigned int index;
1297 int ret;
1298
1299@@ -108,9 +108,9 @@
1300 }
1301
1302 /* Add padding and return the message digest */
1303-static void sha256_final(void* ctx, u8 *out)
1304+static void sha256_final(struct crypto_tfm *tfm, u8 *out)
1305 {
1306- struct s390_sha256_ctx *sctx = ctx;
1307+ struct s390_sha256_ctx *sctx = crypto_tfm_ctx(tfm);
1308
1309 /* must perform manual padding */
1310 pad_message(sctx);
1311Index: linux-2.6.16.50/arch/x86_64/crypto/aes-x86_64-asm.S
1312===================================================================
1313--- linux-2.6.16.50.orig/arch/x86_64/crypto/aes-x86_64-asm.S 2006-07-14 18:09:26.339436000 +1200
1314+++ linux-2.6.16.50/arch/x86_64/crypto/aes-x86_64-asm.S 2006-07-14 18:10:31.151486500 +1200
1315@@ -15,6 +15,10 @@
1316
1317 .text
1318
1319+#include <asm/asm-offsets.h>
1320+
1321+#define BASE crypto_tfm_ctx_offset
1322+
1323 #define R1 %rax
1324 #define R1E %eax
1325 #define R1X %ax
1326@@ -46,19 +50,19 @@
1327 #define R10 %r10
1328 #define R11 %r11
1329
1330-#define prologue(FUNC,BASE,B128,B192,r1,r2,r3,r4,r5,r6,r7,r8,r9,r10,r11) \
1331+#define prologue(FUNC,KEY,B128,B192,r1,r2,r3,r4,r5,r6,r7,r8,r9,r10,r11) \
1332 .global FUNC; \
1333 .type FUNC,@function; \
1334 .align 8; \
1335 FUNC: movq r1,r2; \
1336 movq r3,r4; \
1337- leaq BASE+52(r8),r9; \
1338+ leaq BASE+KEY+52(r8),r9; \
1339 movq r10,r11; \
1340 movl (r7),r5 ## E; \
1341 movl 4(r7),r1 ## E; \
1342 movl 8(r7),r6 ## E; \
1343 movl 12(r7),r7 ## E; \
1344- movl (r8),r10 ## E; \
1345+ movl BASE(r8),r10 ## E; \
1346 xorl -48(r9),r5 ## E; \
1347 xorl -44(r9),r1 ## E; \
1348 xorl -40(r9),r6 ## E; \
1349@@ -128,8 +132,8 @@
1350 movl r3 ## E,r1 ## E; \
1351 movl r4 ## E,r2 ## E;
1352
1353-#define entry(FUNC,BASE,B128,B192) \
1354- prologue(FUNC,BASE,B128,B192,R2,R8,R7,R9,R1,R3,R4,R6,R10,R5,R11)
1355+#define entry(FUNC,KEY,B128,B192) \
1356+ prologue(FUNC,KEY,B128,B192,R2,R8,R7,R9,R1,R3,R4,R6,R10,R5,R11)
1357
1358 #define return epilogue(R8,R2,R9,R7,R5,R6,R3,R4,R11)
1359
1360@@ -147,9 +151,9 @@
1361 #define decrypt_final(TAB,OFFSET) \
1362 round(TAB,OFFSET,R2,R1,R4,R3,R6,R5,R7,R10,R5,R6,R3,R4)
1363
1364-/* void aes_encrypt(void *ctx, u8 *out, const u8 *in) */
 1365+/* void aes_enc_blk(struct crypto_tfm *tfm, u8 *out, const u8 *in) */
1366
1367- entry(aes_encrypt,0,enc128,enc192)
1368+ entry(aes_enc_blk,0,enc128,enc192)
1369 encrypt_round(aes_ft_tab,-96)
1370 encrypt_round(aes_ft_tab,-80)
1371 enc192: encrypt_round(aes_ft_tab,-64)
1372@@ -166,9 +170,9 @@
1373 encrypt_final(aes_fl_tab,112)
1374 return
1375
1376-/* void aes_decrypt(void *ctx, u8 *out, const u8 *in) */
1377+/* void aes_dec_blk(struct crypto_tfm *tfm, u8 *out, const u8 *in) */
1378
1379- entry(aes_decrypt,240,dec128,dec192)
1380+ entry(aes_dec_blk,240,dec128,dec192)
1381 decrypt_round(aes_it_tab,-96)
1382 decrypt_round(aes_it_tab,-80)
1383 dec192: decrypt_round(aes_it_tab,-64)
1384Index: linux-2.6.16.50/arch/x86_64/kernel/asm-offsets.c
1385===================================================================
1386--- linux-2.6.16.50.orig/arch/x86_64/kernel/asm-offsets.c 2006-07-14 18:09:26.339436000 +1200
1387+++ linux-2.6.16.50/arch/x86_64/kernel/asm-offsets.c 2006-07-14 18:10:31.155486750 +1200
1388@@ -68,5 +68,7 @@
1389 DEFINE(pbe_next, offsetof(struct pbe, next));
1390 BLANK();
1391 DEFINE(TSS_ist, offsetof(struct tss_struct, ist));
1392+ BLANK();
1393+ DEFINE(crypto_tfm_ctx_offset, offsetof(struct crypto_tfm, __crt_ctx));
1394 return 0;
1395 }
1396Index: linux-2.6.16.50/crypto/Kconfig
1397===================================================================
1398--- linux-2.6.16.50.orig/crypto/Kconfig 2006-07-14 18:09:26.359437250 +1200
1399+++ linux-2.6.16.50/crypto/Kconfig 2006-07-14 18:10:31.159487000 +1200
1400@@ -337,7 +337,7 @@
1401
1402 config CRYPTO_TEST
1403 tristate "Testing module"
1404- depends on CRYPTO
1405+ depends on CRYPTO && m
1406 help
1407 Quick & dirty crypto test module.
1408
1409Index: linux-2.6.16.50/crypto/anubis.c
1410===================================================================
1411--- linux-2.6.16.50.orig/crypto/anubis.c 2006-07-14 18:09:26.359437250 +1200
1412+++ linux-2.6.16.50/crypto/anubis.c 2006-07-14 18:10:31.163487250 +1200
1413@@ -460,16 +460,15 @@
1414 0xf726ffedU, 0xe89d6f8eU, 0x19a0f089U,
1415 };
1416
1417-static int anubis_setkey(void *ctx_arg, const u8 *in_key,
1418+static int anubis_setkey(struct crypto_tfm *tfm, const u8 *in_key,
1419 unsigned int key_len, u32 *flags)
1420 {
1421+ struct anubis_ctx *ctx = crypto_tfm_ctx(tfm);
1422 const __be32 *key = (const __be32 *)in_key;
1423 int N, R, i, r;
1424 u32 kappa[ANUBIS_MAX_N];
1425 u32 inter[ANUBIS_MAX_N];
1426
1427- struct anubis_ctx *ctx = ctx_arg;
1428-
1429 switch (key_len)
1430 {
1431 case 16: case 20: case 24: case 28:
1432@@ -660,15 +659,15 @@
1433 dst[i] = cpu_to_be32(inter[i]);
1434 }
1435
1436-static void anubis_encrypt(void *ctx_arg, u8 *dst, const u8 *src)
1437+static void anubis_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
1438 {
1439- struct anubis_ctx *ctx = ctx_arg;
1440+ struct anubis_ctx *ctx = crypto_tfm_ctx(tfm);
1441 anubis_crypt(ctx->E, dst, src, ctx->R);
1442 }
1443
1444-static void anubis_decrypt(void *ctx_arg, u8 *dst, const u8 *src)
1445+static void anubis_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
1446 {
1447- struct anubis_ctx *ctx = ctx_arg;
1448+ struct anubis_ctx *ctx = crypto_tfm_ctx(tfm);
1449 anubis_crypt(ctx->D, dst, src, ctx->R);
1450 }
1451
1452Index: linux-2.6.16.50/crypto/arc4.c
1453===================================================================
1454--- linux-2.6.16.50.orig/crypto/arc4.c 2006-07-14 18:09:26.359437250 +1200
1455+++ linux-2.6.16.50/crypto/arc4.c 2006-07-14 18:10:31.163487250 +1200
1456@@ -24,9 +24,10 @@
1457 u8 x, y;
1458 };
1459
1460-static int arc4_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len, u32 *flags)
1461+static int arc4_set_key(struct crypto_tfm *tfm, const u8 *in_key,
1462+ unsigned int key_len, u32 *flags)
1463 {
1464- struct arc4_ctx *ctx = ctx_arg;
1465+ struct arc4_ctx *ctx = crypto_tfm_ctx(tfm);
1466 int i, j = 0, k = 0;
1467
1468 ctx->x = 1;
1469@@ -48,9 +49,9 @@
1470 return 0;
1471 }
1472
1473-static void arc4_crypt(void *ctx_arg, u8 *out, const u8 *in)
1474+static void arc4_crypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
1475 {
1476- struct arc4_ctx *ctx = ctx_arg;
1477+ struct arc4_ctx *ctx = crypto_tfm_ctx(tfm);
1478
1479 u8 *const S = ctx->S;
1480 u8 x = ctx->x;
1481Index: linux-2.6.16.50/crypto/blowfish.c
1482===================================================================
1483--- linux-2.6.16.50.orig/crypto/blowfish.c 2006-07-14 18:09:26.363437500 +1200
1484+++ linux-2.6.16.50/crypto/blowfish.c 2006-07-14 18:10:31.167487500 +1200
1485@@ -349,7 +349,7 @@
1486 dst[1] = yl;
1487 }
1488
1489-static void bf_encrypt(void *ctx, u8 *dst, const u8 *src)
1490+static void bf_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
1491 {
1492 const __be32 *in_blk = (const __be32 *)src;
1493 __be32 *const out_blk = (__be32 *)dst;
1494@@ -357,17 +357,18 @@
1495
1496 in32[0] = be32_to_cpu(in_blk[0]);
1497 in32[1] = be32_to_cpu(in_blk[1]);
1498- encrypt_block(ctx, out32, in32);
1499+ encrypt_block(crypto_tfm_ctx(tfm), out32, in32);
1500 out_blk[0] = cpu_to_be32(out32[0]);
1501 out_blk[1] = cpu_to_be32(out32[1]);
1502 }
1503
1504-static void bf_decrypt(void *ctx, u8 *dst, const u8 *src)
1505+static void bf_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
1506 {
1507+ struct bf_ctx *ctx = crypto_tfm_ctx(tfm);
1508 const __be32 *in_blk = (const __be32 *)src;
1509 __be32 *const out_blk = (__be32 *)dst;
1510- const u32 *P = ((struct bf_ctx *)ctx)->p;
1511- const u32 *S = ((struct bf_ctx *)ctx)->s;
1512+ const u32 *P = ctx->p;
1513+ const u32 *S = ctx->s;
1514 u32 yl = be32_to_cpu(in_blk[0]);
1515 u32 yr = be32_to_cpu(in_blk[1]);
1516
1517@@ -398,12 +399,14 @@
1518 /*
1519 * Calculates the blowfish S and P boxes for encryption and decryption.
1520 */
1521-static int bf_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags)
1522+static int bf_setkey(struct crypto_tfm *tfm, const u8 *key,
1523+ unsigned int keylen, u32 *flags)
1524 {
1525+ struct bf_ctx *ctx = crypto_tfm_ctx(tfm);
1526+ u32 *P = ctx->p;
1527+ u32 *S = ctx->s;
1528 short i, j, count;
1529 u32 data[2], temp;
1530- u32 *P = ((struct bf_ctx *)ctx)->p;
1531- u32 *S = ((struct bf_ctx *)ctx)->s;
1532
1533 /* Copy the initialization s-boxes */
1534 for (i = 0, count = 0; i < 256; i++)
1535Index: linux-2.6.16.50/crypto/cast5.c
1536===================================================================
1537--- linux-2.6.16.50.orig/crypto/cast5.c 2006-07-14 18:09:26.363437500 +1200
1538+++ linux-2.6.16.50/crypto/cast5.c 2006-07-14 18:10:31.171487750 +1200
1539@@ -577,9 +577,9 @@
1540 (((s1[I >> 24] + s2[(I>>16)&0xff]) ^ s3[(I>>8)&0xff]) - s4[I&0xff]) )
1541
1542
1543-static void cast5_encrypt(void *ctx, u8 * outbuf, const u8 * inbuf)
1544+static void cast5_encrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf)
1545 {
1546- struct cast5_ctx *c = (struct cast5_ctx *) ctx;
1547+ struct cast5_ctx *c = crypto_tfm_ctx(tfm);
1548 const __be32 *src = (const __be32 *)inbuf;
1549 __be32 *dst = (__be32 *)outbuf;
1550 u32 l, r, t;
1551@@ -642,9 +642,9 @@
1552 dst[1] = cpu_to_be32(l);
1553 }
1554
1555-static void cast5_decrypt(void *ctx, u8 * outbuf, const u8 * inbuf)
1556+static void cast5_decrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf)
1557 {
1558- struct cast5_ctx *c = (struct cast5_ctx *) ctx;
1559+ struct cast5_ctx *c = crypto_tfm_ctx(tfm);
1560 const __be32 *src = (const __be32 *)inbuf;
1561 __be32 *dst = (__be32 *)outbuf;
1562 u32 l, r, t;
1563@@ -769,15 +769,15 @@
1564 }
1565
1566
1567-static int
1568-cast5_setkey(void *ctx, const u8 * key, unsigned key_len, u32 * flags)
1569+static int cast5_setkey(struct crypto_tfm *tfm, const u8 *key,
1570+ unsigned key_len, u32 *flags)
1571 {
1572+ struct cast5_ctx *c = crypto_tfm_ctx(tfm);
1573 int i;
1574 u32 x[4];
1575 u32 z[4];
1576 u32 k[16];
1577 __be32 p_key[4];
1578- struct cast5_ctx *c = (struct cast5_ctx *) ctx;
1579
1580 if (key_len < 5 || key_len > 16) {
1581 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
1582Index: linux-2.6.16.50/crypto/cast6.c
1583===================================================================
1584--- linux-2.6.16.50.orig/crypto/cast6.c 2006-07-14 18:09:26.363437500 +1200
1585+++ linux-2.6.16.50/crypto/cast6.c 2006-07-14 18:10:31.175488000 +1200
1586@@ -381,13 +381,13 @@
1587 key[7] ^= F2(key[0], Tr[i % 4][7], Tm[i][7]);
1588 }
1589
1590-static int
1591-cast6_setkey(void *ctx, const u8 * in_key, unsigned key_len, u32 * flags)
1592+static int cast6_setkey(struct crypto_tfm *tfm, const u8 *in_key,
1593+ unsigned key_len, u32 *flags)
1594 {
1595 int i;
1596 u32 key[8];
1597 __be32 p_key[8]; /* padded key */
1598- struct cast6_ctx *c = (struct cast6_ctx *) ctx;
1599+ struct cast6_ctx *c = crypto_tfm_ctx(tfm);
1600
1601 if (key_len < 16 || key_len > 32 || key_len % 4 != 0) {
1602 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
1603@@ -444,8 +444,9 @@
1604 block[2] ^= F1(block[3], Kr[0], Km[0]);
1605 }
1606
1607-static void cast6_encrypt (void * ctx, u8 * outbuf, const u8 * inbuf) {
1608- struct cast6_ctx * c = (struct cast6_ctx *)ctx;
1609+static void cast6_encrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf)
1610+{
1611+ struct cast6_ctx *c = crypto_tfm_ctx(tfm);
1612 const __be32 *src = (const __be32 *)inbuf;
1613 __be32 *dst = (__be32 *)outbuf;
1614 u32 block[4];
1615@@ -476,8 +477,8 @@
1616 dst[3] = cpu_to_be32(block[3]);
1617 }
1618
1619-static void cast6_decrypt (void * ctx, u8 * outbuf, const u8 * inbuf) {
1620- struct cast6_ctx * c = (struct cast6_ctx *)ctx;
1621+static void cast6_decrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf) {
1622+ struct cast6_ctx * c = crypto_tfm_ctx(tfm);
1623 const __be32 *src = (const __be32 *)inbuf;
1624 __be32 *dst = (__be32 *)outbuf;
1625 u32 block[4];
1626Index: linux-2.6.16.50/crypto/cipher.c
1627===================================================================
1628--- linux-2.6.16.50.orig/crypto/cipher.c 2006-07-14 18:09:26.367437750 +1200
1629+++ linux-2.6.16.50/crypto/cipher.c 2006-07-14 18:10:31.179488250 +1200
1630@@ -187,7 +187,7 @@
1631 void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block;
1632 int bsize = crypto_tfm_alg_blocksize(tfm);
1633
1634- void (*fn)(void *, u8 *, const u8 *) = desc->crfn;
1635+ void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = desc->crfn;
1636 u8 *iv = desc->info;
1637 unsigned int done = 0;
1638
1639@@ -195,7 +195,7 @@
1640
1641 do {
1642 xor(iv, src);
1643- fn(crypto_tfm_ctx(tfm), dst, iv);
1644+ fn(tfm, dst, iv);
1645 memcpy(iv, dst, bsize);
1646
1647 src += bsize;
1648@@ -218,7 +218,7 @@
1649 u8 *buf = (u8 *)ALIGN((unsigned long)stack, alignmask + 1);
1650 u8 **dst_p = src == dst ? &buf : &dst;
1651
1652- void (*fn)(void *, u8 *, const u8 *) = desc->crfn;
1653+ void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = desc->crfn;
1654 u8 *iv = desc->info;
1655 unsigned int done = 0;
1656
1657@@ -227,7 +227,7 @@
1658 do {
1659 u8 *tmp_dst = *dst_p;
1660
1661- fn(crypto_tfm_ctx(tfm), tmp_dst, src);
1662+ fn(tfm, tmp_dst, src);
1663 xor(tmp_dst, iv);
1664 memcpy(iv, src, bsize);
1665 if (tmp_dst != dst)
1666@@ -245,13 +245,13 @@
1667 {
1668 struct crypto_tfm *tfm = desc->tfm;
1669 int bsize = crypto_tfm_alg_blocksize(tfm);
1670- void (*fn)(void *, u8 *, const u8 *) = desc->crfn;
1671+ void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = desc->crfn;
1672 unsigned int done = 0;
1673
1674 nbytes -= bsize;
1675
1676 do {
1677- fn(crypto_tfm_ctx(tfm), dst, src);
1678+ fn(tfm, dst, src);
1679
1680 src += bsize;
1681 dst += bsize;
1682@@ -268,7 +268,7 @@
1683 tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
1684 return -EINVAL;
1685 } else
1686- return cia->cia_setkey(crypto_tfm_ctx(tfm), key, keylen,
1687+ return cia->cia_setkey(tfm, key, keylen,
1688 &tfm->crt_flags);
1689 }
1690
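The crypto/cipher.c hunks above are the heart of this conversion: the internal callback type changes from void (*)(void *, u8 *, const u8 *) to void (*)(struct crypto_tfm *, u8 *, const u8 *), and cia_setkey is now handed the tfm itself, so each algorithm recovers its private state with crypto_tfm_ctx() instead of receiving a bare context pointer. A minimal sketch of that convention against this 2.6.16-era API follows; the toy_* names are hypothetical and are not part of this patch.

    #include <linux/crypto.h>
    #include <linux/errno.h>
    #include <linux/string.h>

    struct toy_ctx {
            u8 key[16];
    };

    /* New-style setkey: the tfm comes in, the context is derived from it. */
    static int toy_setkey(struct crypto_tfm *tfm, const u8 *in_key,
                          unsigned int key_len, u32 *flags)
    {
            struct toy_ctx *ctx = crypto_tfm_ctx(tfm);

            if (key_len != sizeof(ctx->key)) {
                    *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                    return -EINVAL;
            }
            memcpy(ctx->key, in_key, sizeof(ctx->key));
            return 0;
    }

    /* Toy block transform (XOR with the stored key), purely for illustration. */
    static void toy_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
    {
            struct toy_ctx *ctx = crypto_tfm_ctx(tfm);
            int i;

            for (i = 0; i < 16; i++)
                    dst[i] = src[i] ^ ctx->key[i];
    }
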
1691Index: linux-2.6.16.50/crypto/compress.c
1692===================================================================
1693--- linux-2.6.16.50.orig/crypto/compress.c 2006-07-14 18:09:26.367437750 +1200
1694+++ linux-2.6.16.50/crypto/compress.c 2006-07-14 18:10:31.183488500 +1200
1695@@ -22,8 +22,7 @@
1696 const u8 *src, unsigned int slen,
1697 u8 *dst, unsigned int *dlen)
1698 {
1699- return tfm->__crt_alg->cra_compress.coa_compress(crypto_tfm_ctx(tfm),
1700- src, slen, dst,
1701+ return tfm->__crt_alg->cra_compress.coa_compress(tfm, src, slen, dst,
1702 dlen);
1703 }
1704
1705@@ -31,8 +30,7 @@
1706 const u8 *src, unsigned int slen,
1707 u8 *dst, unsigned int *dlen)
1708 {
1709- return tfm->__crt_alg->cra_compress.coa_decompress(crypto_tfm_ctx(tfm),
1710- src, slen, dst,
1711+ return tfm->__crt_alg->cra_compress.coa_decompress(tfm, src, slen, dst,
1712 dlen);
1713 }
1714
1715@@ -43,21 +41,14 @@
1716
1717 int crypto_init_compress_ops(struct crypto_tfm *tfm)
1718 {
1719- int ret = 0;
1720 struct compress_tfm *ops = &tfm->crt_compress;
1721-
1722- ret = tfm->__crt_alg->cra_compress.coa_init(crypto_tfm_ctx(tfm));
1723- if (ret)
1724- goto out;
1725
1726 ops->cot_compress = crypto_compress;
1727 ops->cot_decompress = crypto_decompress;
1728
1729-out:
1730- return ret;
1731+ return 0;
1732 }
1733
1734 void crypto_exit_compress_ops(struct crypto_tfm *tfm)
1735 {
1736- tfm->__crt_alg->cra_compress.coa_exit(crypto_tfm_ctx(tfm));
1737 }
1738Index: linux-2.6.16.50/crypto/crc32c.c
1739===================================================================
1740--- linux-2.6.16.50.orig/crypto/crc32c.c 2006-07-14 18:09:26.367437750 +1200
1741+++ linux-2.6.16.50/crypto/crc32c.c 2006-07-14 18:10:31.183488500 +1200
1742@@ -31,9 +31,9 @@
1743 * crc using table.
1744 */
1745
1746-static void chksum_init(void *ctx)
1747+static void chksum_init(struct crypto_tfm *tfm)
1748 {
1749- struct chksum_ctx *mctx = ctx;
1750+ struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);
1751
1752 mctx->crc = ~(u32)0; /* common usage */
1753 }
1754@@ -43,10 +43,10 @@
1755 * If your algorithm starts with ~0, then XOR with ~0 before you set
1756 * the seed.
1757 */
1758-static int chksum_setkey(void *ctx, const u8 *key, unsigned int keylen,
1759- u32 *flags)
1760+static int chksum_setkey(struct crypto_tfm *tfm, const u8 *key,
1761+ unsigned int keylen, u32 *flags)
1762 {
1763- struct chksum_ctx *mctx = ctx;
1764+ struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);
1765
1766 if (keylen != sizeof(mctx->crc)) {
1767 if (flags)
1768@@ -57,9 +57,10 @@
1769 return 0;
1770 }
1771
1772-static void chksum_update(void *ctx, const u8 *data, unsigned int length)
1773+static void chksum_update(struct crypto_tfm *tfm, const u8 *data,
1774+ unsigned int length)
1775 {
1776- struct chksum_ctx *mctx = ctx;
1777+ struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);
1778 u32 mcrc;
1779
1780 mcrc = crc32c(mctx->crc, data, (size_t)length);
1781@@ -67,9 +68,9 @@
1782 mctx->crc = mcrc;
1783 }
1784
1785-static void chksum_final(void *ctx, u8 *out)
1786+static void chksum_final(struct crypto_tfm *tfm, u8 *out)
1787 {
1788- struct chksum_ctx *mctx = ctx;
1789+ struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);
1790 u32 mcrc = (mctx->crc ^ ~(u32)0);
1791
1792 *(u32 *)out = __le32_to_cpu(mcrc);
1793Index: linux-2.6.16.50/crypto/crypto_null.c
1794===================================================================
1795--- linux-2.6.16.50.orig/crypto/crypto_null.c 2006-07-14 18:09:26.371438000 +1200
1796+++ linux-2.6.16.50/crypto/crypto_null.c 2006-07-14 18:10:31.187488750 +1200
1797@@ -27,8 +27,8 @@
1798 #define NULL_BLOCK_SIZE 1
1799 #define NULL_DIGEST_SIZE 0
1800
1801-static int null_compress(void *ctx, const u8 *src, unsigned int slen,
1802- u8 *dst, unsigned int *dlen)
1803+static int null_compress(struct crypto_tfm *tfm, const u8 *src,
1804+ unsigned int slen, u8 *dst, unsigned int *dlen)
1805 {
1806 if (slen > *dlen)
1807 return -EINVAL;
1808@@ -37,20 +37,21 @@
1809 return 0;
1810 }
1811
1812-static void null_init(void *ctx)
1813+static void null_init(struct crypto_tfm *tfm)
1814 { }
1815
1816-static void null_update(void *ctx, const u8 *data, unsigned int len)
1817+static void null_update(struct crypto_tfm *tfm, const u8 *data,
1818+ unsigned int len)
1819 { }
1820
1821-static void null_final(void *ctx, u8 *out)
1822+static void null_final(struct crypto_tfm *tfm, u8 *out)
1823 { }
1824
1825-static int null_setkey(void *ctx, const u8 *key,
1826- unsigned int keylen, u32 *flags)
1827+static int null_setkey(struct crypto_tfm *tfm, const u8 *key,
1828+ unsigned int keylen, u32 *flags)
1829 { return 0; }
1830
1831-static void null_crypt(void *ctx, u8 *dst, const u8 *src)
1832+static void null_crypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
1833 {
1834 memcpy(dst, src, NULL_BLOCK_SIZE);
1835 }
1836Index: linux-2.6.16.50/crypto/digest.c
1837===================================================================
1838--- linux-2.6.16.50.orig/crypto/digest.c 2006-07-14 18:09:26.371438000 +1200
1839+++ linux-2.6.16.50/crypto/digest.c 2006-07-14 18:10:31.191489000 +1200
1840@@ -20,13 +20,14 @@
1841
1842 static void init(struct crypto_tfm *tfm)
1843 {
1844- tfm->__crt_alg->cra_digest.dia_init(crypto_tfm_ctx(tfm));
1845+ tfm->__crt_alg->cra_digest.dia_init(tfm);
1846 }
1847
1848 static void update(struct crypto_tfm *tfm,
1849 struct scatterlist *sg, unsigned int nsg)
1850 {
1851 unsigned int i;
1852+ unsigned int alignmask = crypto_tfm_alg_alignmask(tfm);
1853
1854 for (i = 0; i < nsg; i++) {
1855
1856@@ -38,12 +39,22 @@
1857 unsigned int bytes_from_page = min(l, ((unsigned int)
1858 (PAGE_SIZE)) -
1859 offset);
1860- char *p = crypto_kmap(pg, 0) + offset;
1861+ char *src = crypto_kmap(pg, 0);
1862+ char *p = src + offset;
1863
1864- tfm->__crt_alg->cra_digest.dia_update
1865- (crypto_tfm_ctx(tfm), p,
1866- bytes_from_page);
1867- crypto_kunmap(p, 0);
1868+ if (unlikely(offset & alignmask)) {
1869+ unsigned int bytes =
1870+ alignmask + 1 - (offset & alignmask);
1871+ bytes = min(bytes, bytes_from_page);
1872+ tfm->__crt_alg->cra_digest.dia_update(tfm, p,
1873+ bytes);
1874+ p += bytes;
1875+ bytes_from_page -= bytes;
1876+ l -= bytes;
1877+ }
1878+ tfm->__crt_alg->cra_digest.dia_update(tfm, p,
1879+ bytes_from_page);
1880+ crypto_kunmap(src, 0);
1881 crypto_yield(tfm);
1882 offset = 0;
1883 pg++;
1884@@ -54,7 +65,15 @@
1885
1886 static void final(struct crypto_tfm *tfm, u8 *out)
1887 {
1888- tfm->__crt_alg->cra_digest.dia_final(crypto_tfm_ctx(tfm), out);
1889+ unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
1890+ if (unlikely((unsigned long)out & alignmask)) {
1891+ unsigned int size = crypto_tfm_alg_digestsize(tfm);
1892+ u8 buffer[size + alignmask];
1893+ u8 *dst = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
1894+ tfm->__crt_alg->cra_digest.dia_final(tfm, dst);
1895+ memcpy(out, dst, size);
1896+ } else
1897+ tfm->__crt_alg->cra_digest.dia_final(tfm, out);
1898 }
1899
1900 static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
1901@@ -62,25 +81,15 @@
1902 u32 flags;
1903 if (tfm->__crt_alg->cra_digest.dia_setkey == NULL)
1904 return -ENOSYS;
1905- return tfm->__crt_alg->cra_digest.dia_setkey(crypto_tfm_ctx(tfm),
1906- key, keylen, &flags);
1907+ return tfm->__crt_alg->cra_digest.dia_setkey(tfm, key, keylen, &flags);
1908 }
1909
1910 static void digest(struct crypto_tfm *tfm,
1911 struct scatterlist *sg, unsigned int nsg, u8 *out)
1912 {
1913- unsigned int i;
1914-
1915- tfm->crt_digest.dit_init(tfm);
1916-
1917- for (i = 0; i < nsg; i++) {
1918- char *p = crypto_kmap(sg[i].page, 0) + sg[i].offset;
1919- tfm->__crt_alg->cra_digest.dia_update(crypto_tfm_ctx(tfm),
1920- p, sg[i].length);
1921- crypto_kunmap(p, 0);
1922- crypto_yield(tfm);
1923- }
1924- crypto_digest_final(tfm, out);
1925+ init(tfm);
1926+ update(tfm, sg, nsg);
1927+ final(tfm, out);
1928 }
1929
1930 int crypto_init_digest_flags(struct crypto_tfm *tfm, u32 flags)
1931Index: linux-2.6.16.50/crypto/khazad.c
1932===================================================================
1933--- linux-2.6.16.50.orig/crypto/khazad.c 2006-07-14 18:09:26.371438000 +1200
1934+++ linux-2.6.16.50/crypto/khazad.c 2006-07-14 18:10:31.195489250 +1200
1935@@ -754,11 +754,11 @@
1936 0xccc41d14c363da5dULL, 0x5fdc7dcd7f5a6c5cULL, 0xf726ffede89d6f8eULL
1937 };
1938
1939-static int khazad_setkey(void *ctx_arg, const u8 *in_key,
1940- unsigned int key_len, u32 *flags)
1941+static int khazad_setkey(struct crypto_tfm *tfm, const u8 *in_key,
1942+ unsigned int key_len, u32 *flags)
1943 {
1944- struct khazad_ctx *ctx = ctx_arg;
1945- const __be64 *key = (const __be64 *)in_key;
1946+ struct khazad_ctx *ctx = crypto_tfm_ctx(tfm);
1947+ const __be32 *key = (const __be32 *)in_key;
1948 int r;
1949 const u64 *S = T7;
1950 u64 K2, K1;
1951@@ -769,8 +769,9 @@
1952 return -EINVAL;
1953 }
1954
1955- K2 = be64_to_cpu(key[0]);
1956- K1 = be64_to_cpu(key[1]);
1957+ /* key is supposed to be 32-bit aligned */
1958+ K2 = ((u64)be32_to_cpu(key[0]) << 32) | be32_to_cpu(key[1]);
1959+ K1 = ((u64)be32_to_cpu(key[2]) << 32) | be32_to_cpu(key[3]);
1960
1961 /* setup the encrypt key */
1962 for (r = 0; r <= KHAZAD_ROUNDS; r++) {
1963@@ -840,15 +841,15 @@
1964 *dst = cpu_to_be64(state);
1965 }
1966
1967-static void khazad_encrypt(void *ctx_arg, u8 *dst, const u8 *src)
1968+static void khazad_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
1969 {
1970- struct khazad_ctx *ctx = ctx_arg;
1971+ struct khazad_ctx *ctx = crypto_tfm_ctx(tfm);
1972 khazad_crypt(ctx->E, dst, src);
1973 }
1974
1975-static void khazad_decrypt(void *ctx_arg, u8 *dst, const u8 *src)
1976+static void khazad_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
1977 {
1978- struct khazad_ctx *ctx = ctx_arg;
1979+ struct khazad_ctx *ctx = crypto_tfm_ctx(tfm);
1980 khazad_crypt(ctx->D, dst, src);
1981 }
1982
1983Index: linux-2.6.16.50/crypto/md4.c
1984===================================================================
1985--- linux-2.6.16.50.orig/crypto/md4.c 2006-07-14 18:09:26.375438250 +1200
1986+++ linux-2.6.16.50/crypto/md4.c 2006-07-14 18:10:31.199489500 +1200
1987@@ -152,9 +152,9 @@
1988 md4_transform(ctx->hash, ctx->block);
1989 }
1990
1991-static void md4_init(void *ctx)
1992+static void md4_init(struct crypto_tfm *tfm)
1993 {
1994- struct md4_ctx *mctx = ctx;
1995+ struct md4_ctx *mctx = crypto_tfm_ctx(tfm);
1996
1997 mctx->hash[0] = 0x67452301;
1998 mctx->hash[1] = 0xefcdab89;
1999@@ -163,9 +163,9 @@
2000 mctx->byte_count = 0;
2001 }
2002
2003-static void md4_update(void *ctx, const u8 *data, unsigned int len)
2004+static void md4_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
2005 {
2006- struct md4_ctx *mctx = ctx;
2007+ struct md4_ctx *mctx = crypto_tfm_ctx(tfm);
2008 const u32 avail = sizeof(mctx->block) - (mctx->byte_count & 0x3f);
2009
2010 mctx->byte_count += len;
2011@@ -193,9 +193,9 @@
2012 memcpy(mctx->block, data, len);
2013 }
2014
2015-static void md4_final(void *ctx, u8 *out)
2016+static void md4_final(struct crypto_tfm *tfm, u8 *out)
2017 {
2018- struct md4_ctx *mctx = ctx;
2019+ struct md4_ctx *mctx = crypto_tfm_ctx(tfm);
2020 const unsigned int offset = mctx->byte_count & 0x3f;
2021 char *p = (char *)mctx->block + offset;
2022 int padding = 56 - (offset + 1);
2023Index: linux-2.6.16.50/crypto/md5.c
2024===================================================================
2025--- linux-2.6.16.50.orig/crypto/md5.c 2006-07-14 18:09:26.375438250 +1200
2026+++ linux-2.6.16.50/crypto/md5.c 2006-07-14 18:10:31.199489500 +1200
2027@@ -147,9 +147,9 @@
2028 md5_transform(ctx->hash, ctx->block);
2029 }
2030
2031-static void md5_init(void *ctx)
2032+static void md5_init(struct crypto_tfm *tfm)
2033 {
2034- struct md5_ctx *mctx = ctx;
2035+ struct md5_ctx *mctx = crypto_tfm_ctx(tfm);
2036
2037 mctx->hash[0] = 0x67452301;
2038 mctx->hash[1] = 0xefcdab89;
2039@@ -158,9 +158,9 @@
2040 mctx->byte_count = 0;
2041 }
2042
2043-static void md5_update(void *ctx, const u8 *data, unsigned int len)
2044+static void md5_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
2045 {
2046- struct md5_ctx *mctx = ctx;
2047+ struct md5_ctx *mctx = crypto_tfm_ctx(tfm);
2048 const u32 avail = sizeof(mctx->block) - (mctx->byte_count & 0x3f);
2049
2050 mctx->byte_count += len;
2051@@ -188,9 +188,9 @@
2052 memcpy(mctx->block, data, len);
2053 }
2054
2055-static void md5_final(void *ctx, u8 *out)
2056+static void md5_final(struct crypto_tfm *tfm, u8 *out)
2057 {
2058- struct md5_ctx *mctx = ctx;
2059+ struct md5_ctx *mctx = crypto_tfm_ctx(tfm);
2060 const unsigned int offset = mctx->byte_count & 0x3f;
2061 char *p = (char *)mctx->block + offset;
2062 int padding = 56 - (offset + 1);
2063Index: linux-2.6.16.50/crypto/michael_mic.c
2064===================================================================
2065--- linux-2.6.16.50.orig/crypto/michael_mic.c 2006-07-14 18:09:26.375438250 +1200
2066+++ linux-2.6.16.50/crypto/michael_mic.c 2006-07-14 18:10:31.203489750 +1200
2067@@ -45,16 +45,17 @@
2068 } while (0)
2069
2070
2071-static void michael_init(void *ctx)
2072+static void michael_init(struct crypto_tfm *tfm)
2073 {
2074- struct michael_mic_ctx *mctx = ctx;
2075+ struct michael_mic_ctx *mctx = crypto_tfm_ctx(tfm);
2076 mctx->pending_len = 0;
2077 }
2078
2079
2080-static void michael_update(void *ctx, const u8 *data, unsigned int len)
2081+static void michael_update(struct crypto_tfm *tfm, const u8 *data,
2082+ unsigned int len)
2083 {
2084- struct michael_mic_ctx *mctx = ctx;
2085+ struct michael_mic_ctx *mctx = crypto_tfm_ctx(tfm);
2086 const __le32 *src;
2087
2088 if (mctx->pending_len) {
2089@@ -90,9 +91,9 @@
2090 }
2091
2092
2093-static void michael_final(void *ctx, u8 *out)
2094+static void michael_final(struct crypto_tfm *tfm, u8 *out)
2095 {
2096- struct michael_mic_ctx *mctx = ctx;
2097+ struct michael_mic_ctx *mctx = crypto_tfm_ctx(tfm);
2098 u8 *data = mctx->pending;
2099 __le32 *dst = (__le32 *)out;
2100
2101@@ -121,10 +122,10 @@
2102 }
2103
2104
2105-static int michael_setkey(void *ctx, const u8 *key, unsigned int keylen,
2106- u32 *flags)
2107+static int michael_setkey(struct crypto_tfm *tfm, const u8 *key,
2108+ unsigned int keylen, u32 *flags)
2109 {
2110- struct michael_mic_ctx *mctx = ctx;
2111+ struct michael_mic_ctx *mctx = crypto_tfm_ctx(tfm);
2112 const __le32 *data = (const __le32 *)key;
2113
2114 if (keylen != 8) {
2115@@ -145,6 +146,7 @@
2116 .cra_blocksize = 8,
2117 .cra_ctxsize = sizeof(struct michael_mic_ctx),
2118 .cra_module = THIS_MODULE,
2119+ .cra_alignmask = 3,
2120 .cra_list = LIST_HEAD_INIT(michael_mic_alg.cra_list),
2121 .cra_u = { .digest = {
2122 .dia_digestsize = 8,
2123Index: linux-2.6.16.50/crypto/sha1.c
2124===================================================================
2125--- linux-2.6.16.50.orig/crypto/sha1.c 2006-07-14 18:09:26.379438500 +1200
2126+++ linux-2.6.16.50/crypto/sha1.c 2006-07-18 01:35:17.455238201 +1200
2127@@ -34,9 +34,9 @@
2128 u8 buffer[64];
2129 };
2130
2131-static void sha1_init(void *ctx)
2132+static void sha1_init(struct crypto_tfm *tfm)
2133 {
2134- struct sha1_ctx *sctx = ctx;
2135+ struct sha1_ctx *sctx = crypto_tfm_ctx(tfm);
2136 static const struct sha1_ctx initstate = {
2137 0,
2138 { 0x67452301, 0xEFCDAB89, 0x98BADCFE, 0x10325476, 0xC3D2E1F0 },
2139@@ -46,9 +46,10 @@
2140 *sctx = initstate;
2141 }
2142
2143-static void sha1_update(void *ctx, const u8 *data, unsigned int len)
2144+static void sha1_update(struct crypto_tfm *tfm, const u8 *data,
2145+ unsigned int len)
2146 {
2147- struct sha1_ctx *sctx = ctx;
2148+ struct sha1_ctx *sctx = crypto_tfm_ctx(tfm);
2149 unsigned int partial, done;
2150 const u8 *src;
2151
2152@@ -80,9 +81,9 @@
2153
2154
2155 /* Add padding and return the message digest. */
2156-static void sha1_final(void* ctx, u8 *out)
2157+static void sha1_final(struct crypto_tfm *tfm, u8 *out)
2158 {
2159- struct sha1_ctx *sctx = ctx;
2160+ struct sha1_ctx *sctx = crypto_tfm_ctx(tfm);
2161 __be32 *dst = (__be32 *)out;
2162 u32 i, index, padlen;
2163 __be64 bits;
2164@@ -93,10 +94,10 @@
2165 /* Pad out to 56 mod 64 */
2166 index = sctx->count & 0x3f;
2167 padlen = (index < 56) ? (56 - index) : ((64+56) - index);
2168- sha1_update(sctx, padding, padlen);
2169+ sha1_update(tfm, padding, padlen);
2170
2171 /* Append length */
2172- sha1_update(sctx, (const u8 *)&bits, sizeof(bits));
2173+ sha1_update(tfm, (const u8 *)&bits, sizeof(bits));
2174
2175 /* Store state in digest */
2176 for (i = 0; i < 5; i++)
2177@@ -112,6 +113,7 @@
2178 .cra_blocksize = SHA1_HMAC_BLOCK_SIZE,
2179 .cra_ctxsize = sizeof(struct sha1_ctx),
2180 .cra_module = THIS_MODULE,
2181+ .cra_alignmask = 3,
2182 .cra_list = LIST_HEAD_INIT(alg.cra_list),
2183 .cra_u = { .digest = {
2184 .dia_digestsize = SHA1_DIGEST_SIZE,
2185Index: linux-2.6.16.50/crypto/sha256.c
2186===================================================================
2187--- linux-2.6.16.50.orig/crypto/sha256.c 2006-07-14 18:09:26.379438500 +1200
2188+++ linux-2.6.16.50/crypto/sha256.c 2006-07-18 01:35:17.455238201 +1200
2189@@ -230,9 +230,9 @@
2190 memset(W, 0, 64 * sizeof(u32));
2191 }
2192
2193-static void sha256_init(void *ctx)
2194+static void sha256_init(struct crypto_tfm *tfm)
2195 {
2196- struct sha256_ctx *sctx = ctx;
2197+ struct sha256_ctx *sctx = crypto_tfm_ctx(tfm);
2198 sctx->state[0] = H0;
2199 sctx->state[1] = H1;
2200 sctx->state[2] = H2;
2201@@ -242,12 +242,12 @@
2202 sctx->state[6] = H6;
2203 sctx->state[7] = H7;
2204 sctx->count[0] = sctx->count[1] = 0;
2205- memset(sctx->buf, 0, sizeof(sctx->buf));
2206 }
2207
2208-static void sha256_update(void *ctx, const u8 *data, unsigned int len)
2209+static void sha256_update(struct crypto_tfm *tfm, const u8 *data,
2210+ unsigned int len)
2211 {
2212- struct sha256_ctx *sctx = ctx;
2213+ struct sha256_ctx *sctx = crypto_tfm_ctx(tfm);
2214 unsigned int i, index, part_len;
2215
2216 /* Compute number of bytes mod 128 */
2217@@ -277,9 +277,9 @@
2218 memcpy(&sctx->buf[index], &data[i], len-i);
2219 }
2220
2221-static void sha256_final(void* ctx, u8 *out)
2222+static void sha256_final(struct crypto_tfm *tfm, u8 *out)
2223 {
2224- struct sha256_ctx *sctx = ctx;
2225+ struct sha256_ctx *sctx = crypto_tfm_ctx(tfm);
2226 __be32 *dst = (__be32 *)out;
2227 __be32 bits[2];
2228 unsigned int index, pad_len;
2229@@ -293,10 +293,10 @@
2230 /* Pad out to 56 mod 64. */
2231 index = (sctx->count[0] >> 3) & 0x3f;
2232 pad_len = (index < 56) ? (56 - index) : ((64+56) - index);
2233- sha256_update(sctx, padding, pad_len);
2234+ sha256_update(tfm, padding, pad_len);
2235
2236 /* Append length (before padding) */
2237- sha256_update(sctx, (const u8 *)bits, sizeof(bits));
2238+ sha256_update(tfm, (const u8 *)bits, sizeof(bits));
2239
2240 /* Store state in digest */
2241 for (i = 0; i < 8; i++)
2242@@ -313,6 +313,7 @@
2243 .cra_blocksize = SHA256_HMAC_BLOCK_SIZE,
2244 .cra_ctxsize = sizeof(struct sha256_ctx),
2245 .cra_module = THIS_MODULE,
2246+ .cra_alignmask = 3,
2247 .cra_list = LIST_HEAD_INIT(alg.cra_list),
2248 .cra_u = { .digest = {
2249 .dia_digestsize = SHA256_DIGEST_SIZE,
2250Index: linux-2.6.16.50/crypto/sha512.c
2251===================================================================
2252--- linux-2.6.16.50.orig/crypto/sha512.c 2006-07-14 18:09:26.379438500 +1200
2253+++ linux-2.6.16.50/crypto/sha512.c 2006-07-14 18:10:31.211490250 +1200
2254@@ -161,9 +161,9 @@
2255 }
2256
2257 static void
2258-sha512_init(void *ctx)
2259+sha512_init(struct crypto_tfm *tfm)
2260 {
2261- struct sha512_ctx *sctx = ctx;
2262+ struct sha512_ctx *sctx = crypto_tfm_ctx(tfm);
2263 sctx->state[0] = H0;
2264 sctx->state[1] = H1;
2265 sctx->state[2] = H2;
2266@@ -173,13 +173,12 @@
2267 sctx->state[6] = H6;
2268 sctx->state[7] = H7;
2269 sctx->count[0] = sctx->count[1] = sctx->count[2] = sctx->count[3] = 0;
2270- memset(sctx->buf, 0, sizeof(sctx->buf));
2271 }
2272
2273 static void
2274-sha384_init(void *ctx)
2275+sha384_init(struct crypto_tfm *tfm)
2276 {
2277- struct sha512_ctx *sctx = ctx;
2278+ struct sha512_ctx *sctx = crypto_tfm_ctx(tfm);
2279 sctx->state[0] = HP0;
2280 sctx->state[1] = HP1;
2281 sctx->state[2] = HP2;
2282@@ -189,13 +188,12 @@
2283 sctx->state[6] = HP6;
2284 sctx->state[7] = HP7;
2285 sctx->count[0] = sctx->count[1] = sctx->count[2] = sctx->count[3] = 0;
2286- memset(sctx->buf, 0, sizeof(sctx->buf));
2287 }
2288
2289 static void
2290-sha512_update(void *ctx, const u8 *data, unsigned int len)
2291+sha512_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
2292 {
2293- struct sha512_ctx *sctx = ctx;
2294+ struct sha512_ctx *sctx = crypto_tfm_ctx(tfm);
2295
2296 unsigned int i, index, part_len;
2297
2298@@ -233,9 +231,9 @@
2299 }
2300
2301 static void
2302-sha512_final(void *ctx, u8 *hash)
2303+sha512_final(struct crypto_tfm *tfm, u8 *hash)
2304 {
2305- struct sha512_ctx *sctx = ctx;
2306+ struct sha512_ctx *sctx = crypto_tfm_ctx(tfm);
2307 static u8 padding[128] = { 0x80, };
2308 __be64 *dst = (__be64 *)hash;
2309 __be32 bits[4];
2310@@ -251,10 +249,10 @@
2311 /* Pad out to 112 mod 128. */
2312 index = (sctx->count[0] >> 3) & 0x7f;
2313 pad_len = (index < 112) ? (112 - index) : ((128+112) - index);
2314- sha512_update(sctx, padding, pad_len);
2315+ sha512_update(tfm, padding, pad_len);
2316
2317 /* Append length (before padding) */
2318- sha512_update(sctx, (const u8 *)bits, sizeof(bits));
2319+ sha512_update(tfm, (const u8 *)bits, sizeof(bits));
2320
2321 /* Store state in digest */
2322 for (i = 0; i < 8; i++)
2323@@ -264,12 +262,11 @@
2324 memset(sctx, 0, sizeof(struct sha512_ctx));
2325 }
2326
2327-static void sha384_final(void *ctx, u8 *hash)
2328+static void sha384_final(struct crypto_tfm *tfm, u8 *hash)
2329 {
2330- struct sha512_ctx *sctx = ctx;
2331 u8 D[64];
2332
2333- sha512_final(sctx, D);
2334+ sha512_final(tfm, D);
2335
2336 memcpy(hash, D, 48);
2337 memset(D, 0, 64);
2338@@ -281,6 +278,7 @@
2339 .cra_blocksize = SHA512_HMAC_BLOCK_SIZE,
2340 .cra_ctxsize = sizeof(struct sha512_ctx),
2341 .cra_module = THIS_MODULE,
2342+ .cra_alignmask = 3,
2343 .cra_list = LIST_HEAD_INIT(sha512.cra_list),
2344 .cra_u = { .digest = {
2345 .dia_digestsize = SHA512_DIGEST_SIZE,
2346@@ -295,6 +293,7 @@
2347 .cra_flags = CRYPTO_ALG_TYPE_DIGEST,
2348 .cra_blocksize = SHA384_HMAC_BLOCK_SIZE,
2349 .cra_ctxsize = sizeof(struct sha512_ctx),
2350+ .cra_alignmask = 3,
2351 .cra_module = THIS_MODULE,
2352 .cra_list = LIST_HEAD_INIT(sha384.cra_list),
2353 .cra_u = { .digest = {
2354Index: linux-2.6.16.50/crypto/tcrypt.c
2355===================================================================
2356--- linux-2.6.16.50.orig/crypto/tcrypt.c 2006-07-14 18:09:26.379438500 +1200
2357+++ linux-2.6.16.50/crypto/tcrypt.c 2006-07-18 01:36:18.591058951 +1200
2358@@ -570,6 +570,122 @@
2359 crypto_free_tfm(tfm);
2360 }
2361
2362+static void test_digest_jiffies(struct crypto_tfm *tfm, char *p, int blen,
2363+ int plen, char *out, int sec)
2364+{
2365+ struct scatterlist sg[1];
2366+ unsigned long start, end;
2367+ int bcount, pcount;
2368+
2369+ for (start = jiffies, end = start + sec * HZ, bcount = 0;
2370+ time_before(jiffies, end); bcount++) {
2371+ crypto_digest_init(tfm);
2372+ for (pcount = 0; pcount < blen; pcount += plen) {
2373+ sg_set_buf(sg, p + pcount, plen);
2374+ crypto_digest_update(tfm, sg, 1);
2375+ }
2376+ /* we assume there is enough space in 'out' for the result */
2377+ crypto_digest_final(tfm, out);
2378+ }
2379+
2380+ printk("%6u opers/sec, %9lu bytes/sec\n",
2381+ bcount / sec, ((long)bcount * blen) / sec);
2382+
2383+ return;
2384+}
2385+
2386+static void test_digest_cycles(struct crypto_tfm *tfm, char *p, int blen,
2387+ int plen, char *out)
2388+{
2389+ struct scatterlist sg[1];
2390+ unsigned long cycles = 0;
2391+ int i, pcount;
2392+
2393+ local_bh_disable();
2394+ local_irq_disable();
2395+
2396+ /* Warm-up run. */
2397+ for (i = 0; i < 4; i++) {
2398+ crypto_digest_init(tfm);
2399+ for (pcount = 0; pcount < blen; pcount += plen) {
2400+ sg_set_buf(sg, p + pcount, plen);
2401+ crypto_digest_update(tfm, sg, 1);
2402+ }
2403+ crypto_digest_final(tfm, out);
2404+ }
2405+
2406+ /* The real thing. */
2407+ for (i = 0; i < 8; i++) {
2408+ cycles_t start, end;
2409+
2410+ crypto_digest_init(tfm);
2411+
2412+ start = get_cycles();
2413+
2414+ for (pcount = 0; pcount < blen; pcount += plen) {
2415+ sg_set_buf(sg, p + pcount, plen);
2416+ crypto_digest_update(tfm, sg, 1);
2417+ }
2418+ crypto_digest_final(tfm, out);
2419+
2420+ end = get_cycles();
2421+
2422+ cycles += end - start;
2423+ }
2424+
2425+ local_irq_enable();
2426+ local_bh_enable();
2427+
2428+ printk("%6lu cycles/operation, %4lu cycles/byte\n",
2429+ cycles / 8, cycles / (8 * blen));
2430+
2431+ return;
2432+}
2433+
2434+static void test_digest_speed(char *algo, unsigned int sec,
2435+ struct digest_speed *speed)
2436+{
2437+ struct crypto_tfm *tfm;
2438+ char output[1024];
2439+ int i;
2440+
2441+ printk("\ntesting speed of %s\n", algo);
2442+
2443+ tfm = crypto_alloc_tfm(algo, 0);
2444+
2445+ if (tfm == NULL) {
2446+ printk("failed to load transform for %s\n", algo);
2447+ return;
2448+ }
2449+
2450+ if (crypto_tfm_alg_digestsize(tfm) > sizeof(output)) {
2451+ printk("digestsize(%u) > outputbuffer(%zu)\n",
2452+ crypto_tfm_alg_digestsize(tfm), sizeof(output));
2453+ goto out;
2454+ }
2455+
2456+ for (i = 0; speed[i].blen != 0; i++) {
2457+ if (speed[i].blen > TVMEMSIZE) {
2458+ printk("template (%u) too big for tvmem (%u)\n",
2459+ speed[i].blen, TVMEMSIZE);
2460+ goto out;
2461+ }
2462+
2463+ printk("test%3u (%5u byte blocks,%5u bytes per update,%4u updates): ",
2464+ i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);
2465+
2466+ memset(tvmem, 0xff, speed[i].blen);
2467+
2468+ if (sec)
2469+ test_digest_jiffies(tfm, tvmem, speed[i].blen, speed[i].plen, output, sec);
2470+ else
2471+ test_digest_cycles(tfm, tvmem, speed[i].blen, speed[i].plen, output);
2472+ }
2473+
2474+out:
2475+ crypto_free_tfm(tfm);
2476+}
2477+
2478 static void test_deflate(void)
2479 {
2480 unsigned int i;
2481@@ -1086,6 +1202,60 @@
2482 des_speed_template);
2483 break;
2484
2485+ case 300:
2486+ /* fall through */
2487+
2488+ case 301:
2489+ test_digest_speed("md4", sec, generic_digest_speed_template);
2490+ if (mode > 300 && mode < 400) break;
2491+
2492+ case 302:
2493+ test_digest_speed("md5", sec, generic_digest_speed_template);
2494+ if (mode > 300 && mode < 400) break;
2495+
2496+ case 303:
2497+ test_digest_speed("sha1", sec, generic_digest_speed_template);
2498+ if (mode > 300 && mode < 400) break;
2499+
2500+ case 304:
2501+ test_digest_speed("sha256", sec, generic_digest_speed_template);
2502+ if (mode > 300 && mode < 400) break;
2503+
2504+ case 305:
2505+ test_digest_speed("sha384", sec, generic_digest_speed_template);
2506+ if (mode > 300 && mode < 400) break;
2507+
2508+ case 306:
2509+ test_digest_speed("sha512", sec, generic_digest_speed_template);
2510+ if (mode > 300 && mode < 400) break;
2511+
2512+ case 307:
2513+ test_digest_speed("wp256", sec, generic_digest_speed_template);
2514+ if (mode > 300 && mode < 400) break;
2515+
2516+ case 308:
2517+ test_digest_speed("wp384", sec, generic_digest_speed_template);
2518+ if (mode > 300 && mode < 400) break;
2519+
2520+ case 309:
2521+ test_digest_speed("wp512", sec, generic_digest_speed_template);
2522+ if (mode > 300 && mode < 400) break;
2523+
2524+ case 310:
2525+ test_digest_speed("tgr128", sec, generic_digest_speed_template);
2526+ if (mode > 300 && mode < 400) break;
2527+
2528+ case 311:
2529+ test_digest_speed("tgr160", sec, generic_digest_speed_template);
2530+ if (mode > 300 && mode < 400) break;
2531+
2532+ case 312:
2533+ test_digest_speed("tgr192", sec, generic_digest_speed_template);
2534+ if (mode > 300 && mode < 400) break;
2535+
2536+ case 399:
2537+ break;
2538+
2539 case 1000:
2540 test_available();
2541 break;
2542@@ -1113,7 +1283,14 @@
2543
2544 kfree(xbuf);
2545 kfree(tvmem);
2546- return 0;
2547+
2548+ /* We intentionally return -EAGAIN to prevent keeping
2549+ * the module. It does all its work from init()
2550+ * and doesn't offer any runtime functionality
2551+ * => we don't need it in the memory, do we?
2552+ * -- mludvig
2553+ */
2554+ return -EAGAIN;
2555 }
2556
2557 /*
2558Index: linux-2.6.16.50/crypto/tea.c
2559===================================================================
2560--- linux-2.6.16.50.orig/crypto/tea.c 2006-07-14 18:09:26.383438750 +1200
2561+++ linux-2.6.16.50/crypto/tea.c 2006-07-14 18:10:31.223491000 +1200
2562@@ -45,10 +45,10 @@
2563 u32 KEY[4];
2564 };
2565
2566-static int tea_setkey(void *ctx_arg, const u8 *in_key,
2567- unsigned int key_len, u32 *flags)
2568-{
2569- struct tea_ctx *ctx = ctx_arg;
2570+static int tea_setkey(struct crypto_tfm *tfm, const u8 *in_key,
2571+ unsigned int key_len, u32 *flags)
2572+{
2573+ struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
2574 const __le32 *key = (const __le32 *)in_key;
2575
2576 if (key_len != 16)
2577@@ -66,12 +66,11 @@
2578
2579 }
2580
2581-static void tea_encrypt(void *ctx_arg, u8 *dst, const u8 *src)
2582-{
2583+static void tea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
2584+{
2585 u32 y, z, n, sum = 0;
2586 u32 k0, k1, k2, k3;
2587-
2588- struct tea_ctx *ctx = ctx_arg;
2589+ struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
2590 const __le32 *in = (const __le32 *)src;
2591 __le32 *out = (__le32 *)dst;
2592
2593@@ -95,11 +94,11 @@
2594 out[1] = cpu_to_le32(z);
2595 }
2596
2597-static void tea_decrypt(void *ctx_arg, u8 *dst, const u8 *src)
2598-{
2599+static void tea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
2600+{
2601 u32 y, z, n, sum;
2602 u32 k0, k1, k2, k3;
2603- struct tea_ctx *ctx = ctx_arg;
2604+ struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
2605 const __le32 *in = (const __le32 *)src;
2606 __le32 *out = (__le32 *)dst;
2607
2608@@ -125,10 +124,10 @@
2609 out[1] = cpu_to_le32(z);
2610 }
2611
2612-static int xtea_setkey(void *ctx_arg, const u8 *in_key,
2613- unsigned int key_len, u32 *flags)
2614-{
2615- struct xtea_ctx *ctx = ctx_arg;
2616+static int xtea_setkey(struct crypto_tfm *tfm, const u8 *in_key,
2617+ unsigned int key_len, u32 *flags)
2618+{
2619+ struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
2620 const __le32 *key = (const __le32 *)in_key;
2621
2622 if (key_len != 16)
2623@@ -146,12 +145,11 @@
2624
2625 }
2626
2627-static void xtea_encrypt(void *ctx_arg, u8 *dst, const u8 *src)
2628-{
2629+static void xtea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
2630+{
2631 u32 y, z, sum = 0;
2632 u32 limit = XTEA_DELTA * XTEA_ROUNDS;
2633-
2634- struct xtea_ctx *ctx = ctx_arg;
2635+ struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
2636 const __le32 *in = (const __le32 *)src;
2637 __le32 *out = (__le32 *)dst;
2638
2639@@ -168,10 +166,10 @@
2640 out[1] = cpu_to_le32(z);
2641 }
2642
2643-static void xtea_decrypt(void *ctx_arg, u8 *dst, const u8 *src)
2644-{
2645+static void xtea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
2646+{
2647 u32 y, z, sum;
2648- struct tea_ctx *ctx = ctx_arg;
2649+ struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
2650 const __le32 *in = (const __le32 *)src;
2651 __le32 *out = (__le32 *)dst;
2652
2653@@ -191,12 +189,11 @@
2654 }
2655
2656
2657-static void xeta_encrypt(void *ctx_arg, u8 *dst, const u8 *src)
2658-{
2659+static void xeta_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
2660+{
2661 u32 y, z, sum = 0;
2662 u32 limit = XTEA_DELTA * XTEA_ROUNDS;
2663-
2664- struct xtea_ctx *ctx = ctx_arg;
2665+ struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
2666 const __le32 *in = (const __le32 *)src;
2667 __le32 *out = (__le32 *)dst;
2668
2669@@ -213,10 +210,10 @@
2670 out[1] = cpu_to_le32(z);
2671 }
2672
2673-static void xeta_decrypt(void *ctx_arg, u8 *dst, const u8 *src)
2674-{
2675+static void xeta_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
2676+{
2677 u32 y, z, sum;
2678- struct tea_ctx *ctx = ctx_arg;
2679+ struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
2680 const __le32 *in = (const __le32 *)src;
2681 __le32 *out = (__le32 *)dst;
2682
2683Index: linux-2.6.16.50/crypto/tgr192.c
2684===================================================================
2685--- linux-2.6.16.50.orig/crypto/tgr192.c 2006-07-14 18:09:26.383438750 +1200
2686+++ linux-2.6.16.50/crypto/tgr192.c 2006-07-14 18:10:31.227491250 +1200
2687@@ -496,11 +496,10 @@
2688 tctx->c = c;
2689 }
2690
2691-static void tgr192_init(void *ctx)
2692+static void tgr192_init(struct crypto_tfm *tfm)
2693 {
2694- struct tgr192_ctx *tctx = ctx;
2695+ struct tgr192_ctx *tctx = crypto_tfm_ctx(tfm);
2696
2697- memset (tctx->hash, 0, 64);
2698 tctx->a = 0x0123456789abcdefULL;
2699 tctx->b = 0xfedcba9876543210ULL;
2700 tctx->c = 0xf096a5b4c3b2e187ULL;
2701@@ -511,9 +510,10 @@
2702
2703 /* Update the message digest with the contents
2704 * of INBUF with length INLEN. */
2705-static void tgr192_update(void *ctx, const u8 * inbuf, unsigned int len)
2706+static void tgr192_update(struct crypto_tfm *tfm, const u8 *inbuf,
2707+ unsigned int len)
2708 {
2709- struct tgr192_ctx *tctx = ctx;
2710+ struct tgr192_ctx *tctx = crypto_tfm_ctx(tfm);
2711
2712 if (tctx->count == 64) { /* flush the buffer */
2713 tgr192_transform(tctx, tctx->hash);
2714@@ -527,7 +527,7 @@
2715 for (; len && tctx->count < 64; len--) {
2716 tctx->hash[tctx->count++] = *inbuf++;
2717 }
2718- tgr192_update(tctx, NULL, 0);
2719+ tgr192_update(tfm, NULL, 0);
2720 if (!len) {
2721 return;
2722 }
2723@@ -549,15 +549,15 @@
2724
2725
2726 /* The routine terminates the computation */
2727-static void tgr192_final(void *ctx, u8 * out)
2728+static void tgr192_final(struct crypto_tfm *tfm, u8 * out)
2729 {
2730- struct tgr192_ctx *tctx = ctx;
2731+ struct tgr192_ctx *tctx = crypto_tfm_ctx(tfm);
2732 __be64 *dst = (__be64 *)out;
2733 __be64 *be64p;
2734 __le32 *le32p;
2735 u32 t, msb, lsb;
2736
2737- tgr192_update(tctx, NULL, 0); /* flush */ ;
2738+ tgr192_update(tfm, NULL, 0); /* flush */ ;
2739
2740 msb = 0;
2741 t = tctx->nblocks;
2742@@ -585,7 +585,7 @@
2743 while (tctx->count < 64) {
2744 tctx->hash[tctx->count++] = 0;
2745 }
2746- tgr192_update(tctx, NULL, 0); /* flush */ ;
2747+ tgr192_update(tfm, NULL, 0); /* flush */ ;
2748 memset(tctx->hash, 0, 56); /* fill next block with zeroes */
2749 }
2750 /* append the 64 bit count */
2751@@ -601,22 +601,20 @@
2752 dst[2] = be64p[2] = cpu_to_be64(tctx->c);
2753 }
2754
2755-static void tgr160_final(void *ctx, u8 * out)
2756+static void tgr160_final(struct crypto_tfm *tfm, u8 * out)
2757 {
2758- struct tgr192_ctx *wctx = ctx;
2759 u8 D[64];
2760
2761- tgr192_final(wctx, D);
2762+ tgr192_final(tfm, D);
2763 memcpy(out, D, TGR160_DIGEST_SIZE);
2764 memset(D, 0, TGR192_DIGEST_SIZE);
2765 }
2766
2767-static void tgr128_final(void *ctx, u8 * out)
2768+static void tgr128_final(struct crypto_tfm *tfm, u8 * out)
2769 {
2770- struct tgr192_ctx *wctx = ctx;
2771 u8 D[64];
2772
2773- tgr192_final(wctx, D);
2774+ tgr192_final(tfm, D);
2775 memcpy(out, D, TGR128_DIGEST_SIZE);
2776 memset(D, 0, TGR192_DIGEST_SIZE);
2777 }
2778@@ -627,6 +625,7 @@
2779 .cra_blocksize = TGR192_BLOCK_SIZE,
2780 .cra_ctxsize = sizeof(struct tgr192_ctx),
2781 .cra_module = THIS_MODULE,
2782+ .cra_alignmask = 7,
2783 .cra_list = LIST_HEAD_INIT(tgr192.cra_list),
2784 .cra_u = {.digest = {
2785 .dia_digestsize = TGR192_DIGEST_SIZE,
2786@@ -641,6 +640,7 @@
2787 .cra_blocksize = TGR192_BLOCK_SIZE,
2788 .cra_ctxsize = sizeof(struct tgr192_ctx),
2789 .cra_module = THIS_MODULE,
2790+ .cra_alignmask = 7,
2791 .cra_list = LIST_HEAD_INIT(tgr160.cra_list),
2792 .cra_u = {.digest = {
2793 .dia_digestsize = TGR160_DIGEST_SIZE,
2794@@ -655,6 +655,7 @@
2795 .cra_blocksize = TGR192_BLOCK_SIZE,
2796 .cra_ctxsize = sizeof(struct tgr192_ctx),
2797 .cra_module = THIS_MODULE,
2798+ .cra_alignmask = 7,
2799 .cra_list = LIST_HEAD_INIT(tgr128.cra_list),
2800 .cra_u = {.digest = {
2801 .dia_digestsize = TGR128_DIGEST_SIZE,
2802Index: linux-2.6.16.50/crypto/wp512.c
2803===================================================================
2804--- linux-2.6.16.50.orig/crypto/wp512.c 2006-07-14 18:09:26.383438750 +1200
2805+++ linux-2.6.16.50/crypto/wp512.c 2006-07-14 18:10:31.235491750 +1200
2806@@ -981,9 +981,9 @@
2807
2808 }
2809
2810-static void wp512_init (void *ctx) {
2811+static void wp512_init(struct crypto_tfm *tfm) {
2812+ struct wp512_ctx *wctx = crypto_tfm_ctx(tfm);
2813 int i;
2814- struct wp512_ctx *wctx = ctx;
2815
2816 memset(wctx->bitLength, 0, 32);
2817 wctx->bufferBits = wctx->bufferPos = 0;
2818@@ -993,10 +993,10 @@
2819 }
2820 }
2821
2822-static void wp512_update(void *ctx, const u8 *source, unsigned int len)
2823+static void wp512_update(struct crypto_tfm *tfm, const u8 *source,
2824+ unsigned int len)
2825 {
2826-
2827- struct wp512_ctx *wctx = ctx;
2828+ struct wp512_ctx *wctx = crypto_tfm_ctx(tfm);
2829 int sourcePos = 0;
2830 unsigned int bits_len = len * 8; // convert to number of bits
2831 int sourceGap = (8 - ((int)bits_len & 7)) & 7;
2832@@ -1054,9 +1054,9 @@
2833
2834 }
2835
2836-static void wp512_final(void *ctx, u8 *out)
2837+static void wp512_final(struct crypto_tfm *tfm, u8 *out)
2838 {
2839- struct wp512_ctx *wctx = ctx;
2840+ struct wp512_ctx *wctx = crypto_tfm_ctx(tfm);
2841 int i;
2842 u8 *buffer = wctx->buffer;
2843 u8 *bitLength = wctx->bitLength;
2844@@ -1087,22 +1087,20 @@
2845 wctx->bufferPos = bufferPos;
2846 }
2847
2848-static void wp384_final(void *ctx, u8 *out)
2849+static void wp384_final(struct crypto_tfm *tfm, u8 *out)
2850 {
2851- struct wp512_ctx *wctx = ctx;
2852 u8 D[64];
2853
2854- wp512_final (wctx, D);
2855+ wp512_final(tfm, D);
2856 memcpy (out, D, WP384_DIGEST_SIZE);
2857 memset (D, 0, WP512_DIGEST_SIZE);
2858 }
2859
2860-static void wp256_final(void *ctx, u8 *out)
2861+static void wp256_final(struct crypto_tfm *tfm, u8 *out)
2862 {
2863- struct wp512_ctx *wctx = ctx;
2864 u8 D[64];
2865
2866- wp512_final (wctx, D);
2867+ wp512_final(tfm, D);
2868 memcpy (out, D, WP256_DIGEST_SIZE);
2869 memset (D, 0, WP512_DIGEST_SIZE);
2870 }
2871Index: linux-2.6.16.50/arch/i386/kernel/cpu/proc.c
2872===================================================================
2873--- linux-2.6.16.50.orig/arch/i386/kernel/cpu/proc.c 2006-07-15 00:03:51.220033250 +1200
2874+++ linux-2.6.16.50/arch/i386/kernel/cpu/proc.c 2006-07-15 00:04:02.552741500 +1200
2875@@ -52,7 +52,7 @@
2876
2877 /* VIA/Cyrix/Centaur-defined */
2878 NULL, NULL, "rng", "rng_en", NULL, NULL, "ace", "ace_en",
2879- NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
2880+ "ace2", "ace2_en", "phe", "phe_en", "pmm", "pmm_en", NULL, NULL,
2881 NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
2882 NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
2883
2884Index: linux-2.6.16.50/include/asm-i386/cpufeature.h
2885===================================================================
2886--- linux-2.6.16.50.orig/include/asm-i386/cpufeature.h 2006-07-15 00:03:51.648060000 +1200
2887+++ linux-2.6.16.50/include/asm-i386/cpufeature.h 2006-07-15 00:04:02.552741500 +1200
2888@@ -86,6 +86,12 @@
2889 #define X86_FEATURE_XSTORE_EN (5*32+ 3) /* on-CPU RNG enabled */
2890 #define X86_FEATURE_XCRYPT (5*32+ 6) /* on-CPU crypto (xcrypt insn) */
2891 #define X86_FEATURE_XCRYPT_EN (5*32+ 7) /* on-CPU crypto enabled */
2892+#define X86_FEATURE_ACE2 (5*32+ 8) /* Advanced Cryptography Engine v2 */
2893+#define X86_FEATURE_ACE2_EN (5*32+ 9) /* ACE v2 enabled */
2894+#define X86_FEATURE_PHE (5*32+ 10) /* PadLock Hash Engine */
2895+#define X86_FEATURE_PHE_EN (5*32+ 11) /* PHE enabled */
2896+#define X86_FEATURE_PMM (5*32+ 12) /* PadLock Montgomery Multiplier */
2897+#define X86_FEATURE_PMM_EN (5*32+ 13) /* PMM enabled */
2898
2899 /* More extended AMD flags: CPUID level 0x80000001, ecx, word 6 */
2900 #define X86_FEATURE_LAHF_LM (6*32+ 0) /* LAHF/SAHF in long mode */
2901@@ -119,6 +125,12 @@
2902 #define cpu_has_xstore_enabled boot_cpu_has(X86_FEATURE_XSTORE_EN)
2903 #define cpu_has_xcrypt boot_cpu_has(X86_FEATURE_XCRYPT)
2904 #define cpu_has_xcrypt_enabled boot_cpu_has(X86_FEATURE_XCRYPT_EN)
2905+#define cpu_has_ace2 boot_cpu_has(X86_FEATURE_ACE2)
2906+#define cpu_has_ace2_enabled boot_cpu_has(X86_FEATURE_ACE2_EN)
2907+#define cpu_has_phe boot_cpu_has(X86_FEATURE_PHE)
2908+#define cpu_has_phe_enabled boot_cpu_has(X86_FEATURE_PHE_EN)
2909+#define cpu_has_pmm boot_cpu_has(X86_FEATURE_PMM)
2910+#define cpu_has_pmm_enabled boot_cpu_has(X86_FEATURE_PMM_EN)
2911
2912 #endif /* __ASM_I386_CPUFEATURE_H */
2913
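
The word-5 CPUID feature bits and cpu_has_* helpers defined above are the prerequisites the PadLock drivers are meant to test before touching the hardware. A minimal, hypothetical sketch of such a gate (padlock_sha_usable() is not part of this patch; it only uses the macros the hunk above introduces):

    #include <linux/errno.h>
    #include <asm/cpufeature.h>

    /* Only claim the PadLock Hash Engine when it is both present (phe)
     * and enabled by the BIOS/firmware (phe_en). */
    static int padlock_sha_usable(void)
    {
            if (!cpu_has_phe || !cpu_has_phe_enabled)
                    return -ENODEV;
            return 0;
    }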