#define CHACHA_ROUNDS 20
#if HAVE_NATIVE_chacha_3core
-#undef _chacha_crypt_3core
-#undef _chacha_crypt32_3core
-#define _chacha_crypt_3core chacha_crypt
-#define _chacha_crypt32_3core chacha_crypt32
+#define _nettle_chacha_crypt_3core chacha_crypt
+#define _nettle_chacha_crypt32_3core chacha_crypt32
#elif HAVE_NATIVE_chacha_2core
-#undef _chacha_crypt_2core
-#undef _chacha_crypt32_2core
-#define _chacha_crypt_2core chacha_crypt
-#define _chacha_crypt32_2core chacha_crypt32
+#define _nettle_chacha_crypt_2core chacha_crypt
+#define _nettle_chacha_crypt32_2core chacha_crypt32
#elif !(HAVE_NATIVE_fat_chacha_3core || HAVE_NATIVE_fat_chacha_2core)
-#undef _chacha_crypt_1core
-#undef _chacha_crypt32_1core
-#define _chacha_crypt_1core chacha_crypt
-#define _chacha_crypt32_1core chacha_crypt32
+#define _nettle_chacha_crypt_1core chacha_crypt
+#define _nettle_chacha_crypt32_1core chacha_crypt32
#endif
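The preprocessor block above picks, at compile time, which internal variant is emitted under the public names chacha_crypt and chacha_crypt32; when only the fat-build macros are set, neither alias is defined here, presumably because fat builds provide the public symbols through run-time dispatch elsewhere. A minimal standalone sketch of the same aliasing trick (all names below are illustrative, not Nettle's):

/* Illustrative only: the #define renames the definition that follows,
 * so building the "internal" function actually emits the public symbol. */
#include <stdio.h>

#define internal_variant public_entry   /* like _nettle_chacha_crypt_3core -> chacha_crypt */

void
internal_variant(void)   /* after expansion: void public_entry(void) */
{
  printf("fast variant selected at compile time\n");
}

int
main(void)
{
  public_entry();        /* calls the aliased definition */
  return 0;
}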
#if HAVE_NATIVE_chacha_3core || HAVE_NATIVE_fat_chacha_3core
void
-_chacha_crypt_3core(struct chacha_ctx *ctx,
-                    size_t length,
-                    uint8_t *dst,
-                    const uint8_t *src)
+_nettle_chacha_crypt_3core(struct chacha_ctx *ctx,
+                           size_t length,
+                           uint8_t *dst,
+                           const uint8_t *src)
{
uint32_t x[3*_CHACHA_STATE_LENGTH];
while (length > 2*CHACHA_BLOCK_SIZE)
{
- _chacha_3core (x, ctx->state, CHACHA_ROUNDS);
+ _nettle_chacha_3core (x, ctx->state, CHACHA_ROUNDS);
ctx->state[12] += 3;
ctx->state[13] += (ctx->state[12] < 3);
if (length <= 3*CHACHA_BLOCK_SIZE)
}
if (length <= CHACHA_BLOCK_SIZE)
{
- _chacha_core (x, ctx->state, CHACHA_ROUNDS);
+ _nettle_chacha_core (x, ctx->state, CHACHA_ROUNDS);
ctx->state[13] += (++ctx->state[12] == 0);
}
else
{
- _chacha_3core (x, ctx->state, CHACHA_ROUNDS);
+ _nettle_chacha_3core (x, ctx->state, CHACHA_ROUNDS);
ctx->state[12] += 2;
ctx->state[13] += (ctx->state[12] < 2);
}
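The state[12]/state[13] updates above maintain a 64-bit block counter split across two 32-bit words: after adding n to the low word, wrap-around is detected by the new value being smaller than n, and the high word absorbs the carry. A small self-contained check of that idiom (the helper name is made up for the example):

#include <assert.h>
#include <stdint.h>

/* Add n to a 64-bit counter stored as two 32-bit words, low word first. */
static void
counter_add(uint32_t *lo, uint32_t *hi, uint32_t n)
{
  *lo += n;
  *hi += (*lo < n);   /* unsigned wrap-around implies a carry */
}

int
main(void)
{
  uint32_t lo = 0xffffffffu, hi = 7;
  counter_add(&lo, &hi, 3);     /* 0xffffffff + 3 wraps to 2 */
  assert(lo == 2 && hi == 8);
  return 0;
}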
#if HAVE_NATIVE_chacha_2core || HAVE_NATIVE_fat_chacha_2core
void
-_chacha_crypt_2core(struct chacha_ctx *ctx,
-                    size_t length,
-                    uint8_t *dst,
-                    const uint8_t *src)
+_nettle_chacha_crypt_2core(struct chacha_ctx *ctx,
+                           size_t length,
+                           uint8_t *dst,
+                           const uint8_t *src)
{
uint32_t x[2*_CHACHA_STATE_LENGTH];
while (length > CHACHA_BLOCK_SIZE)
{
- _chacha_2core (x, ctx->state, CHACHA_ROUNDS);
+ _nettle_chacha_2core (x, ctx->state, CHACHA_ROUNDS);
ctx->state[12] += 2;
ctx->state[13] += (ctx->state[12] < 2);
if (length <= 2*CHACHA_BLOCK_SIZE)
src += 2*CHACHA_BLOCK_SIZE;
}
- _chacha_core (x, ctx->state, CHACHA_ROUNDS);
+ _nettle_chacha_core (x, ctx->state, CHACHA_ROUNDS);
memxor3 (dst, src, x, length);
ctx->state[13] += (++ctx->state[12] == 0);
}
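memxor3(dst, src, x, length) XORs the generated keystream in x into the input and writes the result to dst. Nettle's memxor3 is a word-optimized routine declared in memxor.h; the sketch below only restates its byte-level semantics and is not the library implementation:

#include <stddef.h>
#include <stdint.h>

/* Semantics-only sketch: dst[i] = a[i] ^ b[i] for i < n. */
static void
memxor3_sketch(uint8_t *dst, const uint8_t *a, const uint8_t *b, size_t n)
{
  size_t i;
  for (i = 0; i < n; i++)
    dst[i] = a[i] ^ b[i];
}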
#if !(HAVE_NATIVE_chacha_3core || HAVE_NATIVE_chacha_2core)
void
-_chacha_crypt_1core(struct chacha_ctx *ctx,
-                    size_t length,
-                    uint8_t *dst,
-                    const uint8_t *src)
+_nettle_chacha_crypt_1core(struct chacha_ctx *ctx,
+                           size_t length,
+                           uint8_t *dst,
+                           const uint8_t *src)
{
if (!length)
return;
{
uint32_t x[_CHACHA_STATE_LENGTH];
- _chacha_core (x, ctx->state, CHACHA_ROUNDS);
+ _nettle_chacha_core (x, ctx->state, CHACHA_ROUNDS);
ctx->state[13] += (++ctx->state[12] == 0);
#if HAVE_NATIVE_chacha_3core || HAVE_NATIVE_fat_chacha_3core
void
-_chacha_crypt32_3core(struct chacha_ctx *ctx,
-                      size_t length,
-                      uint8_t *dst,
-                      const uint8_t *src)
+_nettle_chacha_crypt32_3core(struct chacha_ctx *ctx,
+                             size_t length,
+                             uint8_t *dst,
+                             const uint8_t *src)
{
uint32_t x[3*_CHACHA_STATE_LENGTH];
while (length > 2*CHACHA_BLOCK_SIZE)
{
- _chacha_3core32 (x, ctx->state, CHACHA_ROUNDS);
+ _nettle_chacha_3core32 (x, ctx->state, CHACHA_ROUNDS);
ctx->state[12] += 3;
ctx->state[13] += (ctx->state[12] < 3);
if (length <= 3*CHACHA_BLOCK_SIZE)
}
if (length <= CHACHA_BLOCK_SIZE)
{
- _chacha_core (x, ctx->state, CHACHA_ROUNDS);
+ _nettle_chacha_core (x, ctx->state, CHACHA_ROUNDS);
ctx->state[13] += (++ctx->state[12] == 0);
}
else
{
- _chacha_3core32 (x, ctx->state, CHACHA_ROUNDS);
+ _nettle_chacha_3core32 (x, ctx->state, CHACHA_ROUNDS);
ctx->state[12] += 2;
ctx->state[13] += (ctx->state[12] < 2);
}
#if HAVE_NATIVE_chacha_2core || HAVE_NATIVE_fat_chacha_2core
void
-_chacha_crypt32_2core(struct chacha_ctx *ctx,
-                      size_t length,
-                      uint8_t *dst,
-                      const uint8_t *src)
+_nettle_chacha_crypt32_2core(struct chacha_ctx *ctx,
+                             size_t length,
+                             uint8_t *dst,
+                             const uint8_t *src)
{
uint32_t x[2*_CHACHA_STATE_LENGTH];
while (length > CHACHA_BLOCK_SIZE)
{
- _chacha_2core32 (x, ctx->state, CHACHA_ROUNDS);
+ _nettle_chacha_2core32 (x, ctx->state, CHACHA_ROUNDS);
ctx->state[12] += 2;
if (length <= 2*CHACHA_BLOCK_SIZE)
{
src += 2*CHACHA_BLOCK_SIZE;
}
- _chacha_core (x, ctx->state, CHACHA_ROUNDS);
+ _nettle_chacha_core (x, ctx->state, CHACHA_ROUNDS);
memxor3 (dst, src, x, length);
++ctx->state[12];
}
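Note the difference from the 64-bit-counter variants above: the crypt32 functions bump only state[12], since in the 32-bit-counter (96-bit nonce) layout state[13] holds nonce material and must not receive a carry. A minimal side-by-side sketch of the two conventions:

#include <stdint.h>

/* chacha_crypt convention: 64-bit counter in state[12] (low), state[13] (high). */
static void
counter64_inc(uint32_t *state)
{
  state[13] += (++state[12] == 0);
}

/* chacha_crypt32 convention: 32-bit counter in state[12] only;
 * state[13..15] belong to the 96-bit nonce. */
static void
counter32_inc(uint32_t *state)
{
  ++state[12];
}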
#if !(HAVE_NATIVE_chacha_3core || HAVE_NATIVE_chacha_2core)
void
-_chacha_crypt32_1core(struct chacha_ctx *ctx,
-                      size_t length,
-                      uint8_t *dst,
-                      const uint8_t *src)
+_nettle_chacha_crypt32_1core(struct chacha_ctx *ctx,
+                             size_t length,
+                             uint8_t *dst,
+                             const uint8_t *src)
{
if (!length)
return;
{
uint32_t x[_CHACHA_STATE_LENGTH];
- _chacha_core (x, ctx->state, CHACHA_ROUNDS);
+ _nettle_chacha_core (x, ctx->state, CHACHA_ROUNDS);
++ctx->state[12];
#include "nettle-types.h"
#include "chacha.h"
-#define _chacha_core _nettle_chacha_core
-#define _chacha_2core _nettle_chacha_2core
-#define _chacha_2core32 _nettle_chacha_2core32
-#define _chacha_3core _nettle_chacha_3core
-#define _chacha_3core32 _nettle_chacha_3core32
-#define _chacha_crypt_1core _nettle_chacha_crypt_1core
-#define _chacha_crypt_2core _nettle_chacha_crypt_2core
-#define _chacha_crypt_3core _nettle_chacha_crypt_3core
-#define _chacha_crypt32_1core _nettle_chacha_crypt32_1core
-#define _chacha_crypt32_2core _nettle_chacha_crypt32_2core
-#define _chacha_crypt32_3core _nettle_chacha_crypt32_3core
-
void
-_chacha_core(uint32_t *dst, const uint32_t *src, unsigned rounds);
+_nettle_chacha_core(uint32_t *dst, const uint32_t *src, unsigned rounds);
/* Functions available only in some configurations */
void
-_chacha_2core(uint32_t *dst, const uint32_t *src, unsigned rounds);
+_nettle_chacha_2core(uint32_t *dst, const uint32_t *src, unsigned rounds);
void
-_chacha_2core32(uint32_t *dst, const uint32_t *src, unsigned rounds);
+_nettle_chacha_2core32(uint32_t *dst, const uint32_t *src, unsigned rounds);
void
-_chacha_3core(uint32_t *dst, const uint32_t *src, unsigned rounds);
+_nettle_chacha_3core(uint32_t *dst, const uint32_t *src, unsigned rounds);
void
-_chacha_3core32(uint32_t *dst, const uint32_t *src, unsigned rounds);
+_nettle_chacha_3core32(uint32_t *dst, const uint32_t *src, unsigned rounds);
void
-_chacha_crypt_1core(struct chacha_ctx *ctx,
-                    size_t length,
-                    uint8_t *dst,
-                    const uint8_t *src);
+_nettle_chacha_crypt_1core(struct chacha_ctx *ctx,
+                           size_t length,
+                           uint8_t *dst,
+                           const uint8_t *src);
void
-_chacha_crypt_2core(struct chacha_ctx *ctx,
-                    size_t length,
-                    uint8_t *dst,
-                    const uint8_t *src);
+_nettle_chacha_crypt_2core(struct chacha_ctx *ctx,
+                           size_t length,
+                           uint8_t *dst,
+                           const uint8_t *src);
void
-_chacha_crypt_3core(struct chacha_ctx *ctx,
-                    size_t length,
-                    uint8_t *dst,
-                    const uint8_t *src);
+_nettle_chacha_crypt_3core(struct chacha_ctx *ctx,
+                           size_t length,
+                           uint8_t *dst,
+                           const uint8_t *src);
void
-_chacha_crypt32_1core(struct chacha_ctx *ctx,
-                      size_t length,
-                      uint8_t *dst,
-                      const uint8_t *src);
+_nettle_chacha_crypt32_1core(struct chacha_ctx *ctx,
+                             size_t length,
+                             uint8_t *dst,
+                             const uint8_t *src);
void
-_chacha_crypt32_2core(struct chacha_ctx *ctx,
-                      size_t length,
-                      uint8_t *dst,
-                      const uint8_t *src);
+_nettle_chacha_crypt32_2core(struct chacha_ctx *ctx,
+                             size_t length,
+                             uint8_t *dst,
+                             const uint8_t *src);
void
-_chacha_crypt32_3core(struct chacha_ctx *ctx,
-                      size_t length,
-                      uint8_t *dst,
-                      const uint8_t *src);
+_nettle_chacha_crypt32_3core(struct chacha_ctx *ctx,
+                             size_t length,
+                             uint8_t *dst,
+                             const uint8_t *src);
#endif /* NETTLE_CHACHA_INTERNAL_H_INCLUDED */
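For context, a minimal usage sketch of the public entry points that these internal *_1core/_2core/_3core functions back. The prototypes (chacha_set_key, chacha_set_nonce, chacha_crypt) and size macros are recalled from Nettle's public chacha.h rather than quoted from this diff, so verify them against the installed header:

#include <stddef.h>
#include <stdint.h>
#include <nettle/chacha.h>

/* Encrypt (or decrypt; the XOR keystream makes the operation its own
 * inverse) a buffer with a 256-bit key and 64-bit nonce. */
static void
encrypt_buffer(uint8_t *dst, const uint8_t *src, size_t length,
               const uint8_t key[CHACHA_KEY_SIZE],
               const uint8_t nonce[CHACHA_NONCE_SIZE])
{
  struct chacha_ctx ctx;

  chacha_set_key(&ctx, key);
  chacha_set_nonce(&ctx, nonce);
  chacha_crypt(&ctx, length, dst, src);   /* dispatches to whichever *_core variant was selected */
}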