static int verify_sha256_digest(void)
{
struct kexec_sha_region *ptr, *end;
- struct sha256_state ss;
+ struct sha256_ctx sctx;
u8 digest[SHA256_DIGEST_SIZE];
- sha256_init(&ss);
+ sha256_init(&sctx);
end = purgatory_sha_regions + ARRAY_SIZE(purgatory_sha_regions);
for (ptr = purgatory_sha_regions; ptr < end; ptr++)
- sha256_update(&ss, (uint8_t *)(ptr->start), ptr->len);
- sha256_final(&ss, digest);
+ sha256_update(&sctx, (uint8_t *)(ptr->start), ptr->len);
+ sha256_final(&sctx, digest);
if (memcmp(digest, purgatory_sha256_digest, sizeof(digest)) != 0)
return 1;
return 0;
{
struct kexec_sha_region *ptr, *end;
u8 digest[SHA256_DIGEST_SIZE];
- struct sha256_state sctx;
+ struct sha256_ctx sctx;
sha256_init(&sctx);
end = purgatory_sha_regions + ARRAY_SIZE(purgatory_sha_regions);
{
struct kexec_sha_region *ptr, *end;
u8 digest[SHA256_DIGEST_SIZE];
- struct sha256_state sctx;
+ struct sha256_ctx sctx;
sha256_init(&sctx);
end = purgatory_sha_regions + ARRAY_SIZE(purgatory_sha_regions);
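/*
 * A minimal usage sketch of the typed API that the purgatory hunks above
 * migrate to (hypothetical buffers, not from the patch). sha256_init()
 * now takes a struct sha256_ctx rather than the untyped sha256_state, so
 * SHA-224 and SHA-256 calls can no longer be mixed on one context.
 */
static void example_hash_two_buffers(const u8 *a, size_t a_len,
				     const u8 *b, size_t b_len,
				     u8 digest[SHA256_DIGEST_SIZE])
{
	struct sha256_ctx sctx;

	sha256_init(&sctx);
	sha256_update(&sctx, a, a_len);
	sha256_update(&sctx, b, b_len);
	sha256_final(&sctx, digest);	/* sha256_final() also zeroizes sctx */
}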
static int crypto_sha256_import_lib(struct shash_desc *desc, const void *in)
{
- struct sha256_state *sctx = shash_desc_ctx(desc);
+ struct __sha256_ctx *sctx = shash_desc_ctx(desc);
const u8 *p = in;
memcpy(sctx, p, sizeof(*sctx));
p += sizeof(*sctx);
- sctx->count += *p;
+ sctx->bytecount += *p;
return 0;
}
static int crypto_sha256_export_lib(struct shash_desc *desc, void *out)
{
- struct sha256_state *sctx0 = shash_desc_ctx(desc);
- struct sha256_state sctx = *sctx0;
+ struct __sha256_ctx *sctx0 = shash_desc_ctx(desc);
+ struct __sha256_ctx sctx = *sctx0;
unsigned int partial;
u8 *p = out;
- partial = sctx.count % SHA256_BLOCK_SIZE;
- sctx.count -= partial;
+ partial = sctx.bytecount % SHA256_BLOCK_SIZE;
+ sctx.bytecount -= partial;
memcpy(p, &sctx, sizeof(sctx));
p += sizeof(sctx);
*p = partial;
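/*
 * Layout implied by the import/export pair above: the full struct
 * __sha256_ctx is copied out with ->bytecount rounded down to a block
 * boundary (the partial data stays in ->buf), followed by one byte
 * holding the partial length; import adds that byte back into
 * ->bytecount. Hence the ->statesize below:
 * sizeof(struct crypto_sha256_state) + SHA256_BLOCK_SIZE + 1.
 */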
.update = crypto_sha256_update_lib,
.final = crypto_sha256_final_lib,
.digest = crypto_sha256_digest_lib,
- .descsize = sizeof(struct sha256_state),
+ .descsize = sizeof(struct sha256_ctx),
.statesize = sizeof(struct crypto_sha256_state) +
SHA256_BLOCK_SIZE + 1,
.import = crypto_sha256_import_lib,
.init = crypto_sha224_init,
.update = crypto_sha256_update_lib,
.final = crypto_sha224_final_lib,
- .descsize = sizeof(struct sha256_state),
+ .descsize = sizeof(struct sha224_ctx),
.statesize = sizeof(struct crypto_sha256_state) +
SHA256_BLOCK_SIZE + 1,
.import = crypto_sha256_import_lib,
* on every operation, so we weld the hmac init and final functions in
* here to give it the same usage characteristics as a regular hash
*/
-static void tpm2_hmac_init(struct sha256_state *sctx, u8 *key, u32 key_len)
+static void tpm2_hmac_init(struct sha256_ctx *sctx, u8 *key, u32 key_len)
{
u8 pad[SHA256_BLOCK_SIZE];
int i;
sha256_update(sctx, pad, sizeof(pad));
}
-static void tpm2_hmac_final(struct sha256_state *sctx, u8 *key, u32 key_len,
+static void tpm2_hmac_final(struct sha256_ctx *sctx, u8 *key, u32 key_len,
u8 *out)
{
u8 pad[SHA256_BLOCK_SIZE];
const __be32 bits = cpu_to_be32(bytes * 8);
while (bytes > 0) {
- struct sha256_state sctx;
+ struct sha256_ctx sctx;
__be32 c = cpu_to_be32(counter);
tpm2_hmac_init(&sctx, key, key_len);
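/*
 * For reference, the standard HMAC-SHA256 construction that the welded
 * tpm2_hmac_init()/tpm2_hmac_final() pair implements over the library
 * API: a hedged sketch, not the patch's exact bodies, and it assumes
 * key_len <= SHA256_BLOCK_SIZE as the TPM callers guarantee.
 */
static void hmac_sha256_sketch(const u8 *key, u32 key_len,
			       const u8 *msg, size_t msg_len,
			       u8 out[SHA256_DIGEST_SIZE])
{
	u8 pad[SHA256_BLOCK_SIZE] = {};
	u8 inner[SHA256_DIGEST_SIZE];
	struct sha256_ctx sctx;
	int i;

	memcpy(pad, key, key_len);
	for (i = 0; i < SHA256_BLOCK_SIZE; i++)
		pad[i] ^= 0x36;			/* HMAC_IPAD_VALUE */
	sha256_init(&sctx);
	sha256_update(&sctx, pad, sizeof(pad));
	sha256_update(&sctx, msg, msg_len);
	sha256_final(&sctx, inner);	/* inner = H((K ^ ipad) || msg) */

	memset(pad, 0, sizeof(pad));
	memcpy(pad, key, key_len);
	for (i = 0; i < SHA256_BLOCK_SIZE; i++)
		pad[i] ^= 0x5c;			/* HMAC_OPAD_VALUE */
	sha256_init(&sctx);
	sha256_update(&sctx, pad, sizeof(pad));
	sha256_update(&sctx, inner, sizeof(inner));
	sha256_final(&sctx, out);	/* out = H((K ^ opad) || inner) */
}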
static void tpm2_KDFe(u8 z[EC_PT_SZ], const char *str, u8 *pt_u, u8 *pt_v,
u8 *out)
{
- struct sha256_state sctx;
+ struct sha256_ctx sctx;
/*
* this should be an iterative counter, but because we know
* we're only taking 32 bytes for the point using a sha256
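/*
 * Per the comment above, tpm2_KDFe() reduces to a single iteration:
 * out = SHA-256(be32(1) || z || str || pt_u || pt_v), with str fed as a
 * NUL-terminated label. That formula follows the TPM 2.0 spec's KDFe and
 * is an assumption here; the hunk only shows the context declaration.
 */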
u8 *hmac = NULL;
u32 attrs;
u8 cphash[SHA256_DIGEST_SIZE];
- struct sha256_state sctx;
+ struct sha256_ctx sctx;
if (!auth)
return;
off_t offset_s, offset_p;
u8 rphash[SHA256_DIGEST_SIZE];
u32 attrs, cc;
- struct sha256_state sctx;
+ struct sha256_ctx sctx;
u16 tag = be16_to_cpu(head->tag);
int parm_len, len, i, handles;
u8 buf[SHA512_BLOCK_SIZE];
};
-void sha256_update(struct sha256_state *sctx, const u8 *data, size_t len);
+/* State for the SHA-256 (and SHA-224) compression function */
+struct sha256_block_state {
+ u32 h[SHA256_STATE_WORDS];
+};
-static inline void sha224_init(struct sha256_state *sctx)
-{
- sha224_block_init(&sctx->ctx);
-}
-static inline void sha224_update(struct sha256_state *sctx,
+/*
+ * Context structure, shared by SHA-224 and SHA-256. The sha224_ctx and
+ * sha256_ctx structs wrap this one so that the API has proper typing and
+ * doesn't allow mixing the SHA-224 and SHA-256 functions arbitrarily.
+ */
+struct __sha256_ctx {
+ struct sha256_block_state state;
+ u64 bytecount;
+ u8 buf[SHA256_BLOCK_SIZE] __aligned(__alignof__(__be64));
+};
+void __sha256_update(struct __sha256_ctx *ctx, const u8 *data, size_t len);
+
+/**
+ * struct sha224_ctx - Context for hashing a message with SHA-224
+ * @ctx: private
+ */
+struct sha224_ctx {
+ struct __sha256_ctx ctx;
+};
+
+void sha224_init(struct sha224_ctx *ctx);
+static inline void sha224_update(struct sha224_ctx *ctx,
const u8 *data, size_t len)
{
- sha256_update(sctx, data, len);
+ __sha256_update(&ctx->ctx, data, len);
}
-void sha224_final(struct sha256_state *sctx, u8 out[SHA224_DIGEST_SIZE]);
+void sha224_final(struct sha224_ctx *ctx, u8 out[SHA224_DIGEST_SIZE]);
void sha224(const u8 *data, size_t len, u8 out[SHA224_DIGEST_SIZE]);
-static inline void sha256_init(struct sha256_state *sctx)
+/**
+ * struct sha256_ctx - Context for hashing a message with SHA-256
+ * @ctx: private
+ */
+struct sha256_ctx {
+ struct __sha256_ctx ctx;
+};
+
+void sha256_init(struct sha256_ctx *ctx);
+static inline void sha256_update(struct sha256_ctx *ctx,
+ const u8 *data, size_t len)
{
- sha256_block_init(&sctx->ctx);
+ __sha256_update(&ctx->ctx, data, len);
}
-void sha256_final(struct sha256_state *sctx, u8 out[SHA256_DIGEST_SIZE]);
+void sha256_final(struct sha256_ctx *ctx, u8 out[SHA256_DIGEST_SIZE]);
void sha256(const u8 *data, size_t len, u8 out[SHA256_DIGEST_SIZE]);
/* State for the SHA-512 (and SHA-384) compression function */
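/*
 * Usage sketch for the header above (not part of the patch): the one-shot
 * helpers keep working, and the wrapper structs make cross-variant calls
 * such as sha256_update() on a struct sha224_ctx a compile-time error.
 */
static void example_one_shot(void)
{
	u8 d224[SHA224_DIGEST_SIZE];
	u8 d256[SHA256_DIGEST_SIZE];

	sha224((const u8 *)"abc", 3, d224);
	sha256((const u8 *)"abc", 3, d256);
	/* d256 begins ba 78 16 bf ... per the FIPS 180 "abc" test vector */
}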
/* Calculate and store the digest of segments */
static int kexec_calculate_store_digests(struct kimage *image)
{
- struct sha256_state state;
+ struct sha256_ctx sctx;
int ret = 0, i, j, zero_buf_sz, sha_region_sz;
size_t nullsz;
u8 digest[SHA256_DIGEST_SIZE];
if (!sha_regions)
return -ENOMEM;
- sha256_init(&state);
+ sha256_init(&sctx);
for (j = i = 0; i < image->nr_segments; i++) {
struct kexec_segment *ksegment;
if (check_ima_segment_index(image, i))
continue;
- sha256_update(&state, ksegment->kbuf, ksegment->bufsz);
+ sha256_update(&sctx, ksegment->kbuf, ksegment->bufsz);
/*
* Assume rest of the buffer is filled with zero and
if (bytes > zero_buf_sz)
bytes = zero_buf_sz;
- sha256_update(&state, zero_buf, bytes);
+ sha256_update(&sctx, zero_buf, bytes);
nullsz -= bytes;
}
j++;
}
- sha256_final(&state, digest);
+ sha256_final(&sctx, digest);
ret = kexec_purgatory_get_set_symbol(image, "purgatory_sha_regions",
sha_regions, sha_region_sz, 0);
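/*
 * Note on the loop above: each segment is hashed as if it were memsz
 * bytes long, i.e. the bufsz bytes of kbuf followed by nullsz zero bytes
 * fed from zero_buf in zero_buf_sz-sized chunks, so the digest matches
 * what purgatory later reads from memory.
 */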
#include <linux/module.h>
#include <linux/string.h>
+static const struct sha256_block_state sha224_iv = {
+ .h = {
+ SHA224_H0, SHA224_H1, SHA224_H2, SHA224_H3,
+ SHA224_H4, SHA224_H5, SHA224_H6, SHA224_H7,
+ },
+};
+
+static const struct sha256_block_state sha256_iv = {
+ .h = {
+ SHA256_H0, SHA256_H1, SHA256_H2, SHA256_H3,
+ SHA256_H4, SHA256_H5, SHA256_H6, SHA256_H7,
+ },
+};
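/*
 * These are the standard FIPS 180 initial hash values; __sha256_init()
 * below copies the appropriate one into the context, replacing the
 * sha224_block_init()/sha256_block_init() calls removed from the
 * sha2.h inlines.
 */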
+
/*
* If __DISABLE_EXPORTS is defined, then this file is being compiled for a
* pre-boot environment. In that case, ignore the kconfig options, pull the
return __is_defined(__DISABLE_EXPORTS);
}
-static inline void sha256_blocks(u32 state[SHA256_STATE_WORDS], const u8 *data,
- size_t nblocks)
+static inline void sha256_blocks(struct sha256_block_state *state,
+ const u8 *data, size_t nblocks)
+{
+ sha256_choose_blocks(state->h, data, nblocks, sha256_purgatory(), false);
+}
+
+static void __sha256_init(struct __sha256_ctx *ctx,
+ const struct sha256_block_state *iv,
+ u64 initial_bytecount)
+{
+ ctx->state = *iv;
+ ctx->bytecount = initial_bytecount;
+}
+
+void sha224_init(struct sha224_ctx *ctx)
+{
+ __sha256_init(&ctx->ctx, &sha224_iv, 0);
+}
+EXPORT_SYMBOL_GPL(sha224_init);
+
+void sha256_init(struct sha256_ctx *ctx)
{
- sha256_choose_blocks(state, data, nblocks, sha256_purgatory(), false);
+ __sha256_init(&ctx->ctx, &sha256_iv, 0);
}
+EXPORT_SYMBOL_GPL(sha256_init);
-void sha256_update(struct sha256_state *sctx, const u8 *data, size_t len)
+void __sha256_update(struct __sha256_ctx *ctx, const u8 *data, size_t len)
{
- size_t partial = sctx->count % SHA256_BLOCK_SIZE;
+ size_t partial = ctx->bytecount % SHA256_BLOCK_SIZE;
- sctx->count += len;
- BLOCK_HASH_UPDATE_BLOCKS(sha256_blocks, sctx->ctx.state, data, len,
- SHA256_BLOCK_SIZE, sctx->buf, partial);
+ ctx->bytecount += len;
+ BLOCK_HASH_UPDATE_BLOCKS(sha256_blocks, &ctx->state, data, len,
+ SHA256_BLOCK_SIZE, ctx->buf, partial);
}
-EXPORT_SYMBOL(sha256_update);
+EXPORT_SYMBOL(__sha256_update);
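/*
 * BLOCK_HASH_UPDATE_BLOCKS() (from crypto/internal/blockhash.h) does the
 * buffering in __sha256_update() above: it tops up any partial block in
 * ctx->buf, runs sha256_blocks() over it and over the remaining whole
 * blocks of @data, and stashes the tail bytes back into ctx->buf for the
 * next call.
 */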
-static inline void __sha256_final(struct sha256_state *sctx, u8 *out,
- size_t digest_size)
+static void __sha256_final(struct __sha256_ctx *ctx,
+ u8 *out, size_t digest_size)
{
- size_t partial = sctx->count % SHA256_BLOCK_SIZE;
+ u64 bitcount = ctx->bytecount << 3;
+ size_t partial = ctx->bytecount % SHA256_BLOCK_SIZE;
+
+ ctx->buf[partial++] = 0x80;
+ if (partial > SHA256_BLOCK_SIZE - 8) {
+ memset(&ctx->buf[partial], 0, SHA256_BLOCK_SIZE - partial);
+ sha256_blocks(&ctx->state, ctx->buf, 1);
+ partial = 0;
+ }
+ memset(&ctx->buf[partial], 0, SHA256_BLOCK_SIZE - 8 - partial);
+ *(__be64 *)&ctx->buf[SHA256_BLOCK_SIZE - 8] = cpu_to_be64(bitcount);
+ sha256_blocks(&ctx->state, ctx->buf, 1);
- sha256_finup(&sctx->ctx, sctx->buf, partial, out, digest_size,
- sha256_purgatory(), false);
- memzero_explicit(sctx, sizeof(*sctx));
+ for (size_t i = 0; i < digest_size; i += 4)
+ put_unaligned_be32(ctx->state.h[i / 4], out + i);
}
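/*
 * Worked example of the padding above for the 3-byte message "abc":
 * partial = 3, so buf[3] = 0x80, buf[4..55] are zeroed, and buf[56..63]
 * carry the big-endian bit count 24; one compression then finishes the
 * digest. Only when 56 or more bytes are buffered does the length field
 * spill over, costing a second compression.
 */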
-void sha224_final(struct sha256_state *sctx, u8 out[SHA224_DIGEST_SIZE])
+void sha224_final(struct sha224_ctx *ctx, u8 out[SHA224_DIGEST_SIZE])
{
- __sha256_final(sctx, out, SHA224_DIGEST_SIZE);
+ __sha256_final(&ctx->ctx, out, SHA224_DIGEST_SIZE);
+ memzero_explicit(ctx, sizeof(*ctx));
}
EXPORT_SYMBOL(sha224_final);
-void sha256_final(struct sha256_state *sctx, u8 out[SHA256_DIGEST_SIZE])
+void sha256_final(struct sha256_ctx *ctx, u8 out[SHA256_DIGEST_SIZE])
{
- __sha256_final(sctx, out, SHA256_DIGEST_SIZE);
+ __sha256_final(&ctx->ctx, out, SHA256_DIGEST_SIZE);
+ memzero_explicit(ctx, sizeof(*ctx));
}
EXPORT_SYMBOL(sha256_final);
void sha224(const u8 *data, size_t len, u8 out[SHA224_DIGEST_SIZE])
{
- struct sha256_state sctx;
+ struct sha224_ctx ctx;
- sha224_init(&sctx);
- sha224_update(&sctx, data, len);
- sha224_final(&sctx, out);
+ sha224_init(&ctx);
+ sha224_update(&ctx, data, len);
+ sha224_final(&ctx, out);
}
EXPORT_SYMBOL(sha224);
void sha256(const u8 *data, size_t len, u8 out[SHA256_DIGEST_SIZE])
{
- struct sha256_state sctx;
+ struct sha256_ctx ctx;
- sha256_init(&sctx);
- sha256_update(&sctx, data, len);
- sha256_final(&sctx, out);
+ sha256_init(&ctx);
+ sha256_update(&ctx, data, len);
+ sha256_final(&ctx, out);
}
EXPORT_SYMBOL(sha256);
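/*
 * Quick smoke-test sketch against the FIPS 180 "abc" vector (not part of
 * the patch; the in-tree KUnit tests cover this properly):
 */
static int __maybe_unused sha256_smoke_test(void)
{
	static const u8 expected[SHA256_DIGEST_SIZE] = {
		0xba, 0x78, 0x16, 0xbf, 0x8f, 0x01, 0xcf, 0xea,
		0x41, 0x41, 0x40, 0xde, 0x5d, 0xae, 0x22, 0x23,
		0xb0, 0x03, 0x61, 0xa3, 0x96, 0x17, 0x7a, 0x9c,
		0xb4, 0x10, 0xff, 0x61, 0xf2, 0x00, 0x15, 0xad,
	};
	u8 digest[SHA256_DIGEST_SIZE];

	sha256((const u8 *)"abc", 3, digest);
	return memcmp(digest, expected, sizeof(digest)) ? -EINVAL : 0;
}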