lib/crypto: sha256: Make library API use strongly-typed contexts

Currently the SHA-224 and SHA-256 library functions can be mixed
arbitrarily, even in ways that are incorrect, for example using
sha224_init() and sha256_final().  This is because they operate on the
same structure, sha256_state.

Introduce stronger typing, as I did for SHA-384 and SHA-512.

Also as I did for SHA-384 and SHA-512, use the names *_ctx instead of
*_state.  The *_ctx names have the following small benefits:

- They're shorter.
- They avoid an ambiguity with the compression function state.
- They're consistent with the well-known OpenSSL API.
- Users usually name the variable 'sctx' anyway, which suggests that
  *_ctx would be the more natural name for the actual struct.

Therefore: update the SHA-224 and SHA-256 APIs, implementation, and
calling code accordingly.

In the new structs, also strongly-type the compression function state.
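
As an illustrative sketch (not part of the patch; digest_buffer() is a
hypothetical caller), the typed API now reads:

	#include <crypto/sha2.h>

	static void digest_buffer(const u8 *data, size_t len,
				  u8 out[SHA256_DIGEST_SIZE])
	{
		struct sha256_ctx sctx;	/* was: struct sha256_state */

		sha256_init(&sctx);
		sha256_update(&sctx, data, len);
		sha256_final(&sctx, out);
	}

Mixing types, e.g. passing a struct sha224_ctx to sha256_final(), is now
a compile-time error instead of silently computing an incorrect digest.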

Acked-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20250630160645.3198-7-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>
commit b86ced882b
parent 6fa4b29220
Author: Eric Biggers <ebiggers@kernel.org>
Date:   2025-06-30 09:06:37 -07:00

8 changed files with 144 additions and 68 deletions


@@ -20,14 +20,14 @@ struct kexec_sha_region purgatory_sha_regions[KEXEC_SEGMENT_MAX] __section(".kex
 static int verify_sha256_digest(void)
 {
 	struct kexec_sha_region *ptr, *end;
-	struct sha256_state ss;
+	struct sha256_ctx sctx;
 	u8 digest[SHA256_DIGEST_SIZE];
 
-	sha256_init(&ss);
+	sha256_init(&sctx);
 	end = purgatory_sha_regions + ARRAY_SIZE(purgatory_sha_regions);
 	for (ptr = purgatory_sha_regions; ptr < end; ptr++)
-		sha256_update(&ss, (uint8_t *)(ptr->start), ptr->len);
-	sha256_final(&ss, digest);
+		sha256_update(&sctx, (uint8_t *)(ptr->start), ptr->len);
+	sha256_final(&sctx, digest);
 	if (memcmp(digest, purgatory_sha256_digest, sizeof(digest)) != 0)
 		return 1;
 	return 0;


@@ -16,7 +16,7 @@ int verify_sha256_digest(void)
 {
 	struct kexec_sha_region *ptr, *end;
 	u8 digest[SHA256_DIGEST_SIZE];
-	struct sha256_state sctx;
+	struct sha256_ctx sctx;
 
 	sha256_init(&sctx);
 	end = purgatory_sha_regions + ARRAY_SIZE(purgatory_sha_regions);


@@ -25,7 +25,7 @@ static int verify_sha256_digest(void)
 {
 	struct kexec_sha_region *ptr, *end;
 	u8 digest[SHA256_DIGEST_SIZE];
-	struct sha256_state sctx;
+	struct sha256_ctx sctx;
 
 	sha256_init(&sctx);
 	end = purgatory_sha_regions + ARRAY_SIZE(purgatory_sha_regions);


@@ -137,24 +137,24 @@ static int crypto_sha224_final_lib(struct shash_desc *desc, u8 *out)
 
 static int crypto_sha256_import_lib(struct shash_desc *desc, const void *in)
 {
-	struct sha256_state *sctx = shash_desc_ctx(desc);
+	struct __sha256_ctx *sctx = shash_desc_ctx(desc);
 	const u8 *p = in;
 
 	memcpy(sctx, p, sizeof(*sctx));
 	p += sizeof(*sctx);
-	sctx->count += *p;
+	sctx->bytecount += *p;
 	return 0;
 }
 
 static int crypto_sha256_export_lib(struct shash_desc *desc, void *out)
 {
-	struct sha256_state *sctx0 = shash_desc_ctx(desc);
-	struct sha256_state sctx = *sctx0;
+	struct __sha256_ctx *sctx0 = shash_desc_ctx(desc);
+	struct __sha256_ctx sctx = *sctx0;
 	unsigned int partial;
 	u8 *p = out;
 
-	partial = sctx.count % SHA256_BLOCK_SIZE;
-	sctx.count -= partial;
+	partial = sctx.bytecount % SHA256_BLOCK_SIZE;
+	sctx.bytecount -= partial;
 	memcpy(p, &sctx, sizeof(sctx));
 	p += sizeof(sctx);
 	*p = partial;
@@ -201,7 +201,7 @@ static struct shash_alg algs[] = {
 		.update = crypto_sha256_update_lib,
 		.final = crypto_sha256_final_lib,
 		.digest = crypto_sha256_digest_lib,
-		.descsize = sizeof(struct sha256_state),
+		.descsize = sizeof(struct sha256_ctx),
 		.statesize = sizeof(struct crypto_sha256_state) +
 			     SHA256_BLOCK_SIZE + 1,
 		.import = crypto_sha256_import_lib,
@@ -216,7 +216,7 @@ static struct shash_alg algs[] = {
 		.init = crypto_sha224_init,
 		.update = crypto_sha256_update_lib,
 		.final = crypto_sha224_final_lib,
-		.descsize = sizeof(struct sha256_state),
+		.descsize = sizeof(struct sha224_ctx),
 		.statesize = sizeof(struct crypto_sha256_state) +
 			     SHA256_BLOCK_SIZE + 1,
 		.import = crypto_sha256_import_lib,


@@ -390,7 +390,7 @@ static int tpm2_create_primary(struct tpm_chip *chip, u32 hierarchy,
  * on every operation, so we weld the hmac init and final functions in
  * here to give it the same usage characteristics as a regular hash
  */
-static void tpm2_hmac_init(struct sha256_state *sctx, u8 *key, u32 key_len)
+static void tpm2_hmac_init(struct sha256_ctx *sctx, u8 *key, u32 key_len)
 {
 	u8 pad[SHA256_BLOCK_SIZE];
 	int i;
@@ -406,7 +406,7 @@ static void tpm2_hmac_init(struct sha256_state *sctx, u8 *key, u32 key_len)
 	sha256_update(sctx, pad, sizeof(pad));
 }
 
-static void tpm2_hmac_final(struct sha256_state *sctx, u8 *key, u32 key_len,
+static void tpm2_hmac_final(struct sha256_ctx *sctx, u8 *key, u32 key_len,
 			    u8 *out)
 {
 	u8 pad[SHA256_BLOCK_SIZE];
@@ -440,7 +440,7 @@ static void tpm2_KDFa(u8 *key, u32 key_len, const char *label, u8 *u,
 	const __be32 bits = cpu_to_be32(bytes * 8);
 
 	while (bytes > 0) {
-		struct sha256_state sctx;
+		struct sha256_ctx sctx;
 		__be32 c = cpu_to_be32(counter);
 
 		tpm2_hmac_init(&sctx, key, key_len);
@@ -467,7 +467,7 @@ static void tpm2_KDFa(u8 *key, u32 key_len, const char *label, u8 *u,
 static void tpm2_KDFe(u8 z[EC_PT_SZ], const char *str, u8 *pt_u, u8 *pt_v,
 		      u8 *out)
 {
-	struct sha256_state sctx;
+	struct sha256_ctx sctx;
 	/*
 	 * this should be an iterative counter, but because we know
 	 * we're only taking 32 bytes for the point using a sha256
@@ -592,7 +592,7 @@ void tpm_buf_fill_hmac_session(struct tpm_chip *chip, struct tpm_buf *buf)
 	u8 *hmac = NULL;
 	u32 attrs;
 	u8 cphash[SHA256_DIGEST_SIZE];
-	struct sha256_state sctx;
+	struct sha256_ctx sctx;
 
 	if (!auth)
 		return;
@@ -750,7 +750,7 @@ int tpm_buf_check_hmac_response(struct tpm_chip *chip, struct tpm_buf *buf,
 	off_t offset_s, offset_p;
 	u8 rphash[SHA256_DIGEST_SIZE];
 	u32 attrs, cc;
-	struct sha256_state sctx;
+	struct sha256_ctx sctx;
 	u16 tag = be16_to_cpu(head->tag);
 	int parm_len, len, i, handles;
 


@@ -114,25 +114,55 @@ struct sha512_state {
 	u8 buf[SHA512_BLOCK_SIZE];
 };
 
-void sha256_update(struct sha256_state *sctx, const u8 *data, size_t len);
+/* State for the SHA-256 (and SHA-224) compression function */
+struct sha256_block_state {
+	u32 h[SHA256_STATE_WORDS];
+};
 
-static inline void sha224_init(struct sha256_state *sctx)
-{
-	sha224_block_init(&sctx->ctx);
-}
-static inline void sha224_update(struct sha256_state *sctx,
+/*
+ * Context structure, shared by SHA-224 and SHA-256. The sha224_ctx and
+ * sha256_ctx structs wrap this one so that the API has proper typing and
+ * doesn't allow mixing the SHA-224 and SHA-256 functions arbitrarily.
+ */
+struct __sha256_ctx {
+	struct sha256_block_state state;
+	u64 bytecount;
+	u8 buf[SHA256_BLOCK_SIZE] __aligned(__alignof__(__be64));
+};
+void __sha256_update(struct __sha256_ctx *ctx, const u8 *data, size_t len);
+
+/**
+ * struct sha224_ctx - Context for hashing a message with SHA-224
+ * @ctx: private
+ */
+struct sha224_ctx {
+	struct __sha256_ctx ctx;
+};
+
+void sha224_init(struct sha224_ctx *ctx);
+static inline void sha224_update(struct sha224_ctx *ctx,
 				 const u8 *data, size_t len)
 {
-	sha256_update(sctx, data, len);
+	__sha256_update(&ctx->ctx, data, len);
 }
-void sha224_final(struct sha256_state *sctx, u8 out[SHA224_DIGEST_SIZE]);
+void sha224_final(struct sha224_ctx *ctx, u8 out[SHA224_DIGEST_SIZE]);
 void sha224(const u8 *data, size_t len, u8 out[SHA224_DIGEST_SIZE]);
 
-static inline void sha256_init(struct sha256_state *sctx)
+/**
+ * struct sha256_ctx - Context for hashing a message with SHA-256
+ * @ctx: private
+ */
+struct sha256_ctx {
+	struct __sha256_ctx ctx;
+};
+
+void sha256_init(struct sha256_ctx *ctx);
+static inline void sha256_update(struct sha256_ctx *ctx,
+				 const u8 *data, size_t len)
 {
-	sha256_block_init(&sctx->ctx);
+	__sha256_update(&ctx->ctx, data, len);
 }
-void sha256_final(struct sha256_state *sctx, u8 out[SHA256_DIGEST_SIZE]);
+void sha256_final(struct sha256_ctx *ctx, u8 out[SHA256_DIGEST_SIZE]);
 void sha256(const u8 *data, size_t len, u8 out[SHA256_DIGEST_SIZE]);
 
 /* State for the SHA-512 (and SHA-384) compression function */


@@ -751,7 +751,7 @@ int kexec_add_buffer(struct kexec_buf *kbuf)
 /* Calculate and store the digest of segments */
 static int kexec_calculate_store_digests(struct kimage *image)
 {
-	struct sha256_state state;
+	struct sha256_ctx sctx;
 	int ret = 0, i, j, zero_buf_sz, sha_region_sz;
 	size_t nullsz;
 	u8 digest[SHA256_DIGEST_SIZE];
@@ -770,7 +770,7 @@ static int kexec_calculate_store_digests(struct kimage *image)
 	if (!sha_regions)
 		return -ENOMEM;
 
-	sha256_init(&state);
+	sha256_init(&sctx);
 
 	for (j = i = 0; i < image->nr_segments; i++) {
 		struct kexec_segment *ksegment;
@@ -796,7 +796,7 @@ static int kexec_calculate_store_digests(struct kimage *image)
 		if (check_ima_segment_index(image, i))
 			continue;
 
-		sha256_update(&state, ksegment->kbuf, ksegment->bufsz);
+		sha256_update(&sctx, ksegment->kbuf, ksegment->bufsz);
 
 		/*
 		 * Assume rest of the buffer is filled with zero and
@@ -808,7 +808,7 @@ static int kexec_calculate_store_digests(struct kimage *image)
 			if (bytes > zero_buf_sz)
 				bytes = zero_buf_sz;
 
-			sha256_update(&state, zero_buf, bytes);
+			sha256_update(&sctx, zero_buf, bytes);
 			nullsz -= bytes;
 		}
 
@@ -817,7 +817,7 @@ static int kexec_calculate_store_digests(struct kimage *image)
 		j++;
 	}
 
-	sha256_final(&state, digest);
+	sha256_final(&sctx, digest);
 
 	ret = kexec_purgatory_get_set_symbol(image, "purgatory_sha_regions",
 					     sha_regions, sha_region_sz, 0);


@@ -18,6 +18,20 @@
 #include <linux/module.h>
 #include <linux/string.h>
 
+static const struct sha256_block_state sha224_iv = {
+	.h = {
+		SHA224_H0, SHA224_H1, SHA224_H2, SHA224_H3,
+		SHA224_H4, SHA224_H5, SHA224_H6, SHA224_H7,
+	},
+};
+
+static const struct sha256_block_state sha256_iv = {
+	.h = {
+		SHA256_H0, SHA256_H1, SHA256_H2, SHA256_H3,
+		SHA256_H4, SHA256_H5, SHA256_H6, SHA256_H7,
+	},
+};
+
 /*
  * If __DISABLE_EXPORTS is defined, then this file is being compiled for a
  * pre-boot environment. In that case, ignore the kconfig options, pull the
@@ -32,61 +46,93 @@ static inline bool sha256_purgatory(void)
 	return __is_defined(__DISABLE_EXPORTS);
 }
 
-static inline void sha256_blocks(u32 state[SHA256_STATE_WORDS], const u8 *data,
-				 size_t nblocks)
+static inline void sha256_blocks(struct sha256_block_state *state,
+				 const u8 *data, size_t nblocks)
 {
-	sha256_choose_blocks(state, data, nblocks, sha256_purgatory(), false);
+	sha256_choose_blocks(state->h, data, nblocks, sha256_purgatory(), false);
 }
 
-void sha256_update(struct sha256_state *sctx, const u8 *data, size_t len)
+static void __sha256_init(struct __sha256_ctx *ctx,
+			  const struct sha256_block_state *iv,
+			  u64 initial_bytecount)
 {
-	size_t partial = sctx->count % SHA256_BLOCK_SIZE;
-
-	sctx->count += len;
-	BLOCK_HASH_UPDATE_BLOCKS(sha256_blocks, sctx->ctx.state, data, len,
-				 SHA256_BLOCK_SIZE, sctx->buf, partial);
-}
-EXPORT_SYMBOL(sha256_update);
-
-static inline void __sha256_final(struct sha256_state *sctx, u8 *out,
-				  size_t digest_size)
-{
-	size_t partial = sctx->count % SHA256_BLOCK_SIZE;
-
-	sha256_finup(&sctx->ctx, sctx->buf, partial, out, digest_size,
-		     sha256_purgatory(), false);
-	memzero_explicit(sctx, sizeof(*sctx));
+	ctx->state = *iv;
+	ctx->bytecount = initial_bytecount;
 }
 
-void sha224_final(struct sha256_state *sctx, u8 out[SHA224_DIGEST_SIZE])
+void sha224_init(struct sha224_ctx *ctx)
 {
-	__sha256_final(sctx, out, SHA224_DIGEST_SIZE);
+	__sha256_init(&ctx->ctx, &sha224_iv, 0);
 }
+EXPORT_SYMBOL_GPL(sha224_init);
+
+void sha256_init(struct sha256_ctx *ctx)
+{
+	__sha256_init(&ctx->ctx, &sha256_iv, 0);
+}
+EXPORT_SYMBOL_GPL(sha256_init);
+
+void __sha256_update(struct __sha256_ctx *ctx, const u8 *data, size_t len)
+{
+	size_t partial = ctx->bytecount % SHA256_BLOCK_SIZE;
+
+	ctx->bytecount += len;
+	BLOCK_HASH_UPDATE_BLOCKS(sha256_blocks, &ctx->state, data, len,
+				 SHA256_BLOCK_SIZE, ctx->buf, partial);
+}
+EXPORT_SYMBOL(__sha256_update);
+
+static void __sha256_final(struct __sha256_ctx *ctx,
+			   u8 *out, size_t digest_size)
+{
+	u64 bitcount = ctx->bytecount << 3;
+	size_t partial = ctx->bytecount % SHA256_BLOCK_SIZE;
+
+	ctx->buf[partial++] = 0x80;
+	if (partial > SHA256_BLOCK_SIZE - 8) {
+		memset(&ctx->buf[partial], 0, SHA256_BLOCK_SIZE - partial);
+		sha256_blocks(&ctx->state, ctx->buf, 1);
+		partial = 0;
+	}
+	memset(&ctx->buf[partial], 0, SHA256_BLOCK_SIZE - 8 - partial);
+	*(__be64 *)&ctx->buf[SHA256_BLOCK_SIZE - 8] = cpu_to_be64(bitcount);
+	sha256_blocks(&ctx->state, ctx->buf, 1);
+
+	for (size_t i = 0; i < digest_size; i += 4)
+		put_unaligned_be32(ctx->state.h[i / 4], out + i);
+}
+
+void sha224_final(struct sha224_ctx *ctx, u8 out[SHA224_DIGEST_SIZE])
+{
+	__sha256_final(&ctx->ctx, out, SHA224_DIGEST_SIZE);
+	memzero_explicit(ctx, sizeof(*ctx));
+}
 EXPORT_SYMBOL(sha224_final);
 
-void sha256_final(struct sha256_state *sctx, u8 out[SHA256_DIGEST_SIZE])
+void sha256_final(struct sha256_ctx *ctx, u8 out[SHA256_DIGEST_SIZE])
 {
-	__sha256_final(sctx, out, SHA256_DIGEST_SIZE);
+	__sha256_final(&ctx->ctx, out, SHA256_DIGEST_SIZE);
+	memzero_explicit(ctx, sizeof(*ctx));
 }
 EXPORT_SYMBOL(sha256_final);
 
 void sha224(const u8 *data, size_t len, u8 out[SHA224_DIGEST_SIZE])
 {
-	struct sha256_state sctx;
+	struct sha224_ctx ctx;
 
-	sha224_init(&sctx);
-	sha224_update(&sctx, data, len);
-	sha224_final(&sctx, out);
+	sha224_init(&ctx);
+	sha224_update(&ctx, data, len);
+	sha224_final(&ctx, out);
 }
 EXPORT_SYMBOL(sha224);
 
 void sha256(const u8 *data, size_t len, u8 out[SHA256_DIGEST_SIZE])
 {
-	struct sha256_state sctx;
+	struct sha256_ctx ctx;
 
-	sha256_init(&sctx);
-	sha256_update(&sctx, data, len);
-	sha256_final(&sctx, out);
+	sha256_init(&ctx);
+	sha256_update(&ctx, data, len);
+	sha256_final(&ctx, out);
 }
 EXPORT_SYMBOL(sha256);