/* chacha-crypt.c

   The crypt function in the ChaCha stream cipher.
   Heavily based on the Salsa20 implementation in Nettle.

   Copyright (C) 2014 Niels Möller
   Copyright (C) 2013 Joachim Strömbergson
   Copyright (C) 2012 Simon Josefsson

   This file is part of GNU Nettle.

   GNU Nettle is free software: you can redistribute it and/or
   modify it under the terms of either:

     * the GNU Lesser General Public License as published by the Free
       Software Foundation; either version 3 of the License, or (at your
       option) any later version.

   or

     * the GNU General Public License as published by the Free
       Software Foundation; either version 2 of the License, or (at your
       option) any later version.

   or both in parallel, as here.

   GNU Nettle is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received copies of the GNU General Public License and
   the GNU Lesser General Public License along with this program.  If
   not, see http://www.gnu.org/licenses/.
*/

/* Based on:
   chacha-ref.c version 2008.01.20.
   D. J. Bernstein
   Public domain.
*/
50 #include "chacha-internal.h"
55 #define CHACHA_ROUNDS 20
57 #if HAVE_NATIVE_chacha_4core
58 #define _nettle_chacha_crypt_4core chacha_crypt
59 #define _nettle_chacha_crypt32_4core chacha_crypt32
60 #elif HAVE_NATIVE_chacha_3core
61 #define _nettle_chacha_crypt_3core chacha_crypt
62 #define _nettle_chacha_crypt32_3core chacha_crypt32
63 #elif !(HAVE_NATIVE_fat_chacha_4core || HAVE_NATIVE_fat_chacha_3core)
64 #define _nettle_chacha_crypt_1core chacha_crypt
65 #define _nettle_chacha_crypt32_1core chacha_crypt32
#if HAVE_NATIVE_chacha_4core || HAVE_NATIVE_fat_chacha_4core
/* Encrypt or decrypt LENGTH bytes from SRC to DST (XOR with keystream),
   generating up to four ChaCha blocks per iteration with the 4-way core.
   Uses the 64-bit block counter in state words 12 (low) and 13 (high),
   carrying from word 12 into word 13 on overflow.  */
void
_nettle_chacha_crypt_4core(struct chacha_ctx *ctx,
			   size_t length,
			   uint8_t *dst,
			   const uint8_t *src)
{
  /* Keystream buffer for four blocks. */
  uint32_t x[4*_CHACHA_STATE_LENGTH];

  if (!length)
    return;

  while (length > 2*CHACHA_BLOCK_SIZE)
    {
      _nettle_chacha_4core (x, ctx->state, CHACHA_ROUNDS);
      if (length <= 4*CHACHA_BLOCK_SIZE)
	{
	  /* Final 3 or 4 blocks: advance the counter by exactly the
	     number of blocks consumed, with carry into word 13. */
	  uint32_t incr = 3 + (length > 3*CHACHA_BLOCK_SIZE);
	  ctx->state[12] += incr;
	  ctx->state[13] += (ctx->state[12] < incr);
	  memxor3 (dst, src, x, length);
	  return;
	}
      ctx->state[12] += 4;
      ctx->state[13] += (ctx->state[12] < 4);
      memxor3 (dst, src, x, 4*CHACHA_BLOCK_SIZE);

      length -= 4*CHACHA_BLOCK_SIZE;
      dst += 4*CHACHA_BLOCK_SIZE;
      src += 4*CHACHA_BLOCK_SIZE;
    }
  /* At most two blocks remain. */
  if (length > CHACHA_BLOCK_SIZE)
    {
      _nettle_chacha_2core (x, ctx->state, CHACHA_ROUNDS);
      ctx->state[12] += 2;
      ctx->state[13] += (ctx->state[12] < 2);
    }
  else
    {
      _nettle_chacha_core (x, ctx->state, CHACHA_ROUNDS);
      ctx->state[13] += (++ctx->state[12] == 0);
    }
  memxor3 (dst, src, x, length);
}
#endif
#if HAVE_NATIVE_chacha_3core || HAVE_NATIVE_fat_chacha_3core
/* Encrypt or decrypt LENGTH bytes from SRC to DST (XOR with keystream),
   generating up to three ChaCha blocks per iteration with the 3-way core.
   Uses the 64-bit block counter in state words 12 (low) and 13 (high),
   carrying from word 12 into word 13 on overflow.  */
void
_nettle_chacha_crypt_3core(struct chacha_ctx *ctx,
			   size_t length,
			   uint8_t *dst,
			   const uint8_t *src)
{
  /* Keystream buffer for three blocks. */
  uint32_t x[3*_CHACHA_STATE_LENGTH];

  if (!length)
    return;

  while (length > 2*CHACHA_BLOCK_SIZE)
    {
      _nettle_chacha_3core (x, ctx->state, CHACHA_ROUNDS);
      ctx->state[12] += 3;
      ctx->state[13] += (ctx->state[12] < 3);
      if (length <= 3*CHACHA_BLOCK_SIZE)
	{
	  memxor3 (dst, src, x, length);
	  return;
	}
      memxor3 (dst, src, x, 3*CHACHA_BLOCK_SIZE);

      length -= 3*CHACHA_BLOCK_SIZE;
      dst += 3*CHACHA_BLOCK_SIZE;
      src += 3*CHACHA_BLOCK_SIZE;
    }
  /* At most two blocks remain. */
  if (length <= CHACHA_BLOCK_SIZE)
    {
      _nettle_chacha_core (x, ctx->state, CHACHA_ROUNDS);
      ctx->state[13] += (++ctx->state[12] == 0);
    }
  else
    {
      /* Two blocks needed; the 3-way core produces three, but only
	 two are consumed, so advance the counter by two. */
      _nettle_chacha_3core (x, ctx->state, CHACHA_ROUNDS);
      ctx->state[12] += 2;
      ctx->state[13] += (ctx->state[12] < 2);
    }
  memxor3 (dst, src, x, length);
}
#endif
157 #if !(HAVE_NATIVE_chacha_4core || HAVE_NATIVE_chacha_3core)
159 _nettle_chacha_crypt_1core(struct chacha_ctx
*ctx
,
169 uint32_t x
[_CHACHA_STATE_LENGTH
];
171 _nettle_chacha_core (x
, ctx
->state
, CHACHA_ROUNDS
);
173 ctx
->state
[13] += (++ctx
->state
[12] == 0);
175 /* stopping at 2^70 length per nonce is user's responsibility */
177 if (length
<= CHACHA_BLOCK_SIZE
)
179 memxor3 (dst
, src
, x
, length
);
182 memxor3 (dst
, src
, x
, CHACHA_BLOCK_SIZE
);
184 length
-= CHACHA_BLOCK_SIZE
;
185 dst
+= CHACHA_BLOCK_SIZE
;
186 src
+= CHACHA_BLOCK_SIZE
;
#if HAVE_NATIVE_chacha_4core || HAVE_NATIVE_fat_chacha_4core
/* 32-bit-counter variant (e.g. for the IETF/RFC construction): only
   state word 12 is the block counter; word 13 belongs to the nonce and
   is never touched.  XORs LENGTH bytes of keystream from SRC into DST,
   up to four blocks per iteration.  */
void
_nettle_chacha_crypt32_4core(struct chacha_ctx *ctx,
			     size_t length,
			     uint8_t *dst,
			     const uint8_t *src)
{
  /* Keystream buffer for four blocks. */
  uint32_t x[4*_CHACHA_STATE_LENGTH];

  if (!length)
    return;

  while (length > 2*CHACHA_BLOCK_SIZE)
    {
      _nettle_chacha_4core32 (x, ctx->state, CHACHA_ROUNDS);
      if (length <= 4*CHACHA_BLOCK_SIZE)
	{
	  /* Final 3 or 4 blocks; no carry into word 13. */
	  ctx->state[12] += 3 + (length > 3*CHACHA_BLOCK_SIZE);
	  memxor3 (dst, src, x, length);
	  return;
	}
      ctx->state[12] += 4;
      memxor3 (dst, src, x, 4*CHACHA_BLOCK_SIZE);

      length -= 4*CHACHA_BLOCK_SIZE;
      dst += 4*CHACHA_BLOCK_SIZE;
      src += 4*CHACHA_BLOCK_SIZE;
    }
  /* At most two blocks remain. */
  if (length > CHACHA_BLOCK_SIZE)
    {
      _nettle_chacha_2core32 (x, ctx->state, CHACHA_ROUNDS);
      ctx->state[12] += 2;
    }
  else
    {
      _nettle_chacha_core (x, ctx->state, CHACHA_ROUNDS);
      ctx->state[12]++;
    }
  memxor3 (dst, src, x, length);
}
#endif
#if HAVE_NATIVE_chacha_3core || HAVE_NATIVE_fat_chacha_3core
/* 32-bit-counter variant: only state word 12 is the block counter;
   word 13 belongs to the nonce and is never touched.  XORs LENGTH
   bytes of keystream from SRC into DST, up to three blocks per
   iteration.  */
void
_nettle_chacha_crypt32_3core(struct chacha_ctx *ctx,
			     size_t length,
			     uint8_t *dst,
			     const uint8_t *src)
{
  /* Keystream buffer for three blocks. */
  uint32_t x[3*_CHACHA_STATE_LENGTH];

  if (!length)
    return;

  while (length > 2*CHACHA_BLOCK_SIZE)
    {
      _nettle_chacha_3core32 (x, ctx->state, CHACHA_ROUNDS);
      ctx->state[12] += 3;
      if (length <= 3*CHACHA_BLOCK_SIZE)
	{
	  memxor3 (dst, src, x, length);
	  return;
	}
      memxor3 (dst, src, x, 3*CHACHA_BLOCK_SIZE);

      length -= 3*CHACHA_BLOCK_SIZE;
      dst += 3*CHACHA_BLOCK_SIZE;
      src += 3*CHACHA_BLOCK_SIZE;
    }
  /* At most two blocks remain. */
  if (length <= CHACHA_BLOCK_SIZE)
    {
      _nettle_chacha_core (x, ctx->state, CHACHA_ROUNDS);
      ctx->state[12]++;
    }
  else
    {
      /* Two blocks needed; the 3-way core produces three, but only
	 two are consumed, so advance the counter by two. */
      _nettle_chacha_3core32 (x, ctx->state, CHACHA_ROUNDS);
      ctx->state[12] += 2;
    }
  memxor3 (dst, src, x, length);
}
#endif
274 #if !(HAVE_NATIVE_chacha_4core || HAVE_NATIVE_chacha_3core)
276 _nettle_chacha_crypt32_1core(struct chacha_ctx
*ctx
,
286 uint32_t x
[_CHACHA_STATE_LENGTH
];
288 _nettle_chacha_core (x
, ctx
->state
, CHACHA_ROUNDS
);
292 /* stopping at 2^38 length per nonce is user's responsibility */
294 if (length
<= CHACHA_BLOCK_SIZE
)
296 memxor3 (dst
, src
, x
, length
);
299 memxor3 (dst
, src
, x
, CHACHA_BLOCK_SIZE
);
301 length
-= CHACHA_BLOCK_SIZE
;
302 dst
+= CHACHA_BLOCK_SIZE
;
303 src
+= CHACHA_BLOCK_SIZE
;