author    Eric Biggers <ebiggers@kernel.org>    2025-06-30 09:06:37 -0700
committer Eric Biggers <ebiggers@kernel.org>    2025-07-04 10:18:53 -0700
commit    b86ced882b8e667758afddffd8d6354197842110 (patch)
tree      c2b980acaf7178d2b104502930a091c057de8285 /lib/crypto
parent    6fa4b292204b15e0e269a9fd33bc99b5e36b6883 (diff)
lib/crypto: sha256: Make library API use strongly-typed contexts
Currently the SHA-224 and SHA-256 library functions can be mixed arbitrarily, even in ways that are incorrect, for example using sha224_init() and sha256_final(). This is because they operate on the same structure, sha256_state.

Introduce stronger typing, as I did for SHA-384 and SHA-512.

Also as I did for SHA-384 and SHA-512, use the names *_ctx instead of *_state. The *_ctx names have the following small benefits:

- They're shorter.
- They avoid an ambiguity with the compression function state.
- They're consistent with the well-known OpenSSL API.
- Users usually name the variable 'sctx' anyway, which suggests that *_ctx would be the more natural name for the actual struct.

Therefore: update the SHA-224 and SHA-256 APIs, implementation, and calling code accordingly. In the new structs, also strongly-type the compression function state.

Acked-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20250630160645.3198-7-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>
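For illustration, a minimal caller of the reworked API looks the same as before except for the context type. The function name and buffer below are hypothetical, and the declarations are assumed to come from <crypto/sha2.h>; this mirrors the one-shot sha256() helper in the patch itself:

    #include <crypto/sha2.h>

    /* Illustrative only: hash an arbitrary buffer with the new typed context. */
    static void demo_sha256(const u8 *data, size_t len,
                            u8 digest[SHA256_DIGEST_SIZE])
    {
            struct sha256_ctx ctx;          /* previously: struct sha256_state */

            sha256_init(&ctx);              /* now only accepts struct sha256_ctx */
            sha256_update(&ctx, data, len);
            sha256_final(&ctx, digest);     /* sha224_final(&ctx, ...) no longer compiles */
    }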
Diffstat (limited to 'lib/crypto')
-rw-r--r--  lib/crypto/sha256.c | 100
1 file changed, 73 insertions(+), 27 deletions(-)
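The header changes are outside this diff (limited to 'lib/crypto'), but from the fields used below a rough sketch of the new context types can be inferred. This is an assumption based on ctx->state, ctx->bytecount, ctx->buf and the .ctx member, not the actual <crypto/sha2.h> definitions:

    /* Sketch only: field order and any alignment attributes are guesses. */
    struct sha256_block_state {
            u32 h[SHA256_STATE_WORDS];      /* strongly-typed compression state */
    };

    struct __sha256_ctx {                   /* shared by SHA-224 and SHA-256 */
            struct sha256_block_state state;
            u64 bytecount;
            u8 buf[SHA256_BLOCK_SIZE];
    };

    struct sha224_ctx { struct __sha256_ctx ctx; };
    struct sha256_ctx { struct __sha256_ctx ctx; };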
diff --git a/lib/crypto/sha256.c b/lib/crypto/sha256.c
index ccaae7088016..3e7797a4489d 100644
--- a/lib/crypto/sha256.c
+++ b/lib/crypto/sha256.c
@@ -18,6 +18,20 @@
#include <linux/module.h>
#include <linux/string.h>
+static const struct sha256_block_state sha224_iv = {
+ .h = {
+ SHA224_H0, SHA224_H1, SHA224_H2, SHA224_H3,
+ SHA224_H4, SHA224_H5, SHA224_H6, SHA224_H7,
+ },
+};
+
+static const struct sha256_block_state sha256_iv = {
+ .h = {
+ SHA256_H0, SHA256_H1, SHA256_H2, SHA256_H3,
+ SHA256_H4, SHA256_H5, SHA256_H6, SHA256_H7,
+ },
+};
+
/*
* If __DISABLE_EXPORTS is defined, then this file is being compiled for a
* pre-boot environment. In that case, ignore the kconfig options, pull the
@@ -32,61 +46,93 @@ static inline bool sha256_purgatory(void)
return __is_defined(__DISABLE_EXPORTS);
}
-static inline void sha256_blocks(u32 state[SHA256_STATE_WORDS], const u8 *data,
- size_t nblocks)
+static inline void sha256_blocks(struct sha256_block_state *state,
+ const u8 *data, size_t nblocks)
+{
+ sha256_choose_blocks(state->h, data, nblocks, sha256_purgatory(), false);
+}
+
+static void __sha256_init(struct __sha256_ctx *ctx,
+ const struct sha256_block_state *iv,
+ u64 initial_bytecount)
+{
+ ctx->state = *iv;
+ ctx->bytecount = initial_bytecount;
+}
+
+void sha224_init(struct sha224_ctx *ctx)
+{
+ __sha256_init(&ctx->ctx, &sha224_iv, 0);
+}
+EXPORT_SYMBOL_GPL(sha224_init);
+
+void sha256_init(struct sha256_ctx *ctx)
{
- sha256_choose_blocks(state, data, nblocks, sha256_purgatory(), false);
+ __sha256_init(&ctx->ctx, &sha256_iv, 0);
}
+EXPORT_SYMBOL_GPL(sha256_init);
-void sha256_update(struct sha256_state *sctx, const u8 *data, size_t len)
+void __sha256_update(struct __sha256_ctx *ctx, const u8 *data, size_t len)
{
- size_t partial = sctx->count % SHA256_BLOCK_SIZE;
+ size_t partial = ctx->bytecount % SHA256_BLOCK_SIZE;
- sctx->count += len;
- BLOCK_HASH_UPDATE_BLOCKS(sha256_blocks, sctx->ctx.state, data, len,
- SHA256_BLOCK_SIZE, sctx->buf, partial);
+ ctx->bytecount += len;
+ BLOCK_HASH_UPDATE_BLOCKS(sha256_blocks, &ctx->state, data, len,
+ SHA256_BLOCK_SIZE, ctx->buf, partial);
}
-EXPORT_SYMBOL(sha256_update);
+EXPORT_SYMBOL(__sha256_update);
-static inline void __sha256_final(struct sha256_state *sctx, u8 *out,
- size_t digest_size)
+static void __sha256_final(struct __sha256_ctx *ctx,
+ u8 *out, size_t digest_size)
{
- size_t partial = sctx->count % SHA256_BLOCK_SIZE;
+ u64 bitcount = ctx->bytecount << 3;
+ size_t partial = ctx->bytecount % SHA256_BLOCK_SIZE;
+
+ ctx->buf[partial++] = 0x80;
+ if (partial > SHA256_BLOCK_SIZE - 8) {
+ memset(&ctx->buf[partial], 0, SHA256_BLOCK_SIZE - partial);
+ sha256_blocks(&ctx->state, ctx->buf, 1);
+ partial = 0;
+ }
+ memset(&ctx->buf[partial], 0, SHA256_BLOCK_SIZE - 8 - partial);
+ *(__be64 *)&ctx->buf[SHA256_BLOCK_SIZE - 8] = cpu_to_be64(bitcount);
+ sha256_blocks(&ctx->state, ctx->buf, 1);
- sha256_finup(&sctx->ctx, sctx->buf, partial, out, digest_size,
- sha256_purgatory(), false);
- memzero_explicit(sctx, sizeof(*sctx));
+ for (size_t i = 0; i < digest_size; i += 4)
+ put_unaligned_be32(ctx->state.h[i / 4], out + i);
}
-void sha224_final(struct sha256_state *sctx, u8 out[SHA224_DIGEST_SIZE])
+void sha224_final(struct sha224_ctx *ctx, u8 out[SHA224_DIGEST_SIZE])
{
- __sha256_final(sctx, out, SHA224_DIGEST_SIZE);
+ __sha256_final(&ctx->ctx, out, SHA224_DIGEST_SIZE);
+ memzero_explicit(ctx, sizeof(*ctx));
}
EXPORT_SYMBOL(sha224_final);
-void sha256_final(struct sha256_state *sctx, u8 out[SHA256_DIGEST_SIZE])
+void sha256_final(struct sha256_ctx *ctx, u8 out[SHA256_DIGEST_SIZE])
{
- __sha256_final(sctx, out, SHA256_DIGEST_SIZE);
+ __sha256_final(&ctx->ctx, out, SHA256_DIGEST_SIZE);
+ memzero_explicit(ctx, sizeof(*ctx));
}
EXPORT_SYMBOL(sha256_final);
void sha224(const u8 *data, size_t len, u8 out[SHA224_DIGEST_SIZE])
{
- struct sha256_state sctx;
+ struct sha224_ctx ctx;
- sha224_init(&sctx);
- sha224_update(&sctx, data, len);
- sha224_final(&sctx, out);
+ sha224_init(&ctx);
+ sha224_update(&ctx, data, len);
+ sha224_final(&ctx, out);
}
EXPORT_SYMBOL(sha224);
void sha256(const u8 *data, size_t len, u8 out[SHA256_DIGEST_SIZE])
{
- struct sha256_state sctx;
+ struct sha256_ctx ctx;
- sha256_init(&sctx);
- sha256_update(&sctx, data, len);
- sha256_final(&sctx, out);
+ sha256_init(&ctx);
+ sha256_update(&ctx, data, len);
+ sha256_final(&ctx, out);
}
EXPORT_SYMBOL(sha256);