lib/crypto: sha256: Propagate sha256_block_state type to implementations
The previous commit made the SHA-256 compression function state strongly typed, but the new type was not propagated all the way down to the implementations. Do that now.

Acked-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20250630160645.3198-8-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>
parent b86ced882b
commit 4c855d5069
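For orientation before the diff: judging from the state->h[0..7] accesses in the generic hunks below, the strongly typed state is a thin struct wrapper around the eight 32-bit chaining words. A minimal sketch, assuming the field name h; the authoritative definition lives in the SHA-2 header, not here:

/* Sketch of the type being propagated; inferred from the state->h[...]
 * accesses in the generic code below, not copied from the header. */
struct sha256_block_state {
        u32 h[SHA256_STATE_WORDS];      /* SHA256_STATE_WORDS == 8 */
};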
MIPS/OCTEON glue:

@@ -22,7 +22,7 @@
  * We pass everything as 64-bit. OCTEON can handle misaligned data.
  */
 
-void sha256_blocks_arch(u32 state[SHA256_STATE_WORDS],
+void sha256_blocks_arch(struct sha256_block_state *state,
                         const u8 *data, size_t nblocks)
 {
         struct octeon_cop2_state cop2_state;
Library header (declarations and inline dispatch helpers):

@@ -17,9 +17,9 @@ static inline bool sha256_is_arch_optimized(void)
         return false;
 }
 #endif
-void sha256_blocks_generic(u32 state[SHA256_STATE_WORDS],
+void sha256_blocks_generic(struct sha256_block_state *state,
                            const u8 *data, size_t nblocks);
-void sha256_blocks_arch(u32 state[SHA256_STATE_WORDS],
+void sha256_blocks_arch(struct sha256_block_state *state,
                         const u8 *data, size_t nblocks);
 
 static __always_inline void sha256_choose_blocks(
@@ -27,9 +27,9 @@ static __always_inline void sha256_choose_blocks(
         bool force_generic, bool force_simd)
 {
         if (!IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_SHA256) || force_generic)
-                sha256_blocks_generic(state, data, nblocks);
+                sha256_blocks_generic((struct sha256_block_state *)state, data, nblocks);
         else
-                sha256_blocks_arch(state, data, nblocks);
+                sha256_blocks_arch((struct sha256_block_state *)state, data, nblocks);
 }
 
 static __always_inline void sha256_finup(
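The casts in sha256_choose_blocks() are sound only because the caller at this layer still holds the state as a raw u32 array and struct sha256_block_state wraps exactly SHA256_STATE_WORDS words. A hypothetical compile-time guard for that layout assumption (not part of this patch) could look like:

#include <linux/build_bug.h>

/* Hypothetical guard: the casts above rely on the struct being
 * layout-compatible with u32[SHA256_STATE_WORDS]. */
static_assert(sizeof(struct sha256_block_state) ==
              SHA256_STATE_WORDS * sizeof(u32));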
ARM SHA-256-CE assembly:

@@ -67,7 +67,7 @@
         .word   0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2
 
 /*
- * void sha256_ce_transform(u32 state[SHA256_STATE_WORDS],
+ * void sha256_ce_transform(struct sha256_block_state *state,
  *                          const u8 *data, size_t nblocks);
  */
 ENTRY(sha256_ce_transform)
ARM glue:

@@ -10,17 +10,17 @@
 #include <linux/kernel.h>
 #include <linux/module.h>
 
-asmlinkage void sha256_block_data_order(u32 state[SHA256_STATE_WORDS],
+asmlinkage void sha256_block_data_order(struct sha256_block_state *state,
                                         const u8 *data, size_t nblocks);
-asmlinkage void sha256_block_data_order_neon(u32 state[SHA256_STATE_WORDS],
+asmlinkage void sha256_block_data_order_neon(struct sha256_block_state *state,
                                              const u8 *data, size_t nblocks);
-asmlinkage void sha256_ce_transform(u32 state[SHA256_STATE_WORDS],
+asmlinkage void sha256_ce_transform(struct sha256_block_state *state,
                                     const u8 *data, size_t nblocks);
 
 static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_neon);
 static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_ce);
 
-void sha256_blocks_arch(u32 state[SHA256_STATE_WORDS],
+void sha256_blocks_arch(struct sha256_block_state *state,
                         const u8 *data, size_t nblocks)
 {
         if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&
arm64 SHA-256-CE assembly:

@@ -71,7 +71,7 @@
         .word   0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2
 
 /*
- * size_t __sha256_ce_transform(u32 state[SHA256_STATE_WORDS],
+ * size_t __sha256_ce_transform(struct sha256_block_state *state,
  *                              const u8 *data, size_t nblocks);
  */
         .text
arm64 glue:

@@ -10,17 +10,17 @@
 #include <linux/kernel.h>
 #include <linux/module.h>
 
-asmlinkage void sha256_block_data_order(u32 state[SHA256_STATE_WORDS],
+asmlinkage void sha256_block_data_order(struct sha256_block_state *state,
                                         const u8 *data, size_t nblocks);
-asmlinkage void sha256_block_neon(u32 state[SHA256_STATE_WORDS],
+asmlinkage void sha256_block_neon(struct sha256_block_state *state,
                                   const u8 *data, size_t nblocks);
-asmlinkage size_t __sha256_ce_transform(u32 state[SHA256_STATE_WORDS],
+asmlinkage size_t __sha256_ce_transform(struct sha256_block_state *state,
                                         const u8 *data, size_t nblocks);
 
 static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_neon);
 static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_ce);
 
-void sha256_blocks_arch(u32 state[SHA256_STATE_WORDS],
+void sha256_blocks_arch(struct sha256_block_state *state,
                         const u8 *data, size_t nblocks)
 {
         if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&
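Note the size_t return of __sha256_ce_transform: it presumably reports how many blocks remain unprocessed, so the caller can drop and reacquire the NEON unit between batches. A sketch of that calling pattern, with everything beyond the visible declarations assumed:

do {
        size_t rem;

        kernel_neon_begin();
        rem = __sha256_ce_transform(state, data, nblocks);
        kernel_neon_end();
        /* advance past the blocks that were consumed this pass */
        data += (nblocks - rem) * SHA256_BLOCK_SIZE;
        nblocks = rem;
} while (nblocks);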
PowerPC SPE glue:

@@ -26,7 +26,8 @@
  */
 #define MAX_BYTES       1024
 
-extern void ppc_spe_sha256_transform(u32 *state, const u8 *src, u32 blocks);
+extern void ppc_spe_sha256_transform(struct sha256_block_state *state,
+                                     const u8 *src, u32 blocks);
 
 static void spe_begin(void)
 {
@@ -42,7 +43,7 @@ static void spe_end(void)
         preempt_enable();
 }
 
-void sha256_blocks_arch(u32 state[SHA256_STATE_WORDS],
+void sha256_blocks_arch(struct sha256_block_state *state,
                         const u8 *data, size_t nblocks)
 {
         do {
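The do-loop opened at the end of this hunk bounds how long SPE state is held; spe_end() above re-enables preemption, so work is split into MAX_BYTES-sized chunks. A sketch of that chunking pattern, with loop details beyond the visible context assumed:

do {
        /* cap each SPE section to bound the preempt-off window */
        u32 unit = min_t(size_t, nblocks, MAX_BYTES / SHA256_BLOCK_SIZE);

        spe_begin();
        ppc_spe_sha256_transform(state, data, unit);
        spe_end();

        data += unit * SHA256_BLOCK_SIZE;
        nblocks -= unit;
} while (nblocks);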
RISC-V vector-crypto assembly:

@@ -106,7 +106,7 @@
         sha256_4rounds  \last, \k3, W3, W0, W1, W2
 .endm
 
-// void sha256_transform_zvknha_or_zvknhb_zvkb(u32 state[SHA256_STATE_WORDS],
+// void sha256_transform_zvknha_or_zvknhb_zvkb(struct sha256_block_state *state,
 //                                             const u8 *data, size_t nblocks);
 SYM_FUNC_START(sha256_transform_zvknha_or_zvknhb_zvkb)
 
RISC-V glue:

@@ -15,12 +15,13 @@
 #include <linux/kernel.h>
 #include <linux/module.h>
 
-asmlinkage void sha256_transform_zvknha_or_zvknhb_zvkb(
-        u32 state[SHA256_STATE_WORDS], const u8 *data, size_t nblocks);
+asmlinkage void
+sha256_transform_zvknha_or_zvknhb_zvkb(struct sha256_block_state *state,
+                                       const u8 *data, size_t nblocks);
 
 static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_extensions);
 
-void sha256_blocks_arch(u32 state[SHA256_STATE_WORDS],
+void sha256_blocks_arch(struct sha256_block_state *state,
                         const u8 *data, size_t nblocks)
 {
         if (static_branch_likely(&have_extensions) && crypto_simd_usable()) {
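The branch opened at the end of this hunk presumably wraps the vector transform in kernel-mode vector context, falling back to the generic code otherwise; a sketch under that assumption (the body past the visible `if` is not shown in the diff):

if (static_branch_likely(&have_extensions) && crypto_simd_usable()) {
        kernel_vector_begin();
        sha256_transform_zvknha_or_zvknhb_zvkb(state, data, nblocks);
        kernel_vector_end();
} else {
        sha256_blocks_generic(state, data, nblocks);
}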
s390 glue:

@@ -12,7 +12,7 @@
 
 static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_cpacf_sha256);
 
-void sha256_blocks_arch(u32 state[SHA256_STATE_WORDS],
+void sha256_blocks_arch(struct sha256_block_state *state,
                         const u8 *data, size_t nblocks)
 {
         if (static_branch_likely(&have_cpacf_sha256))
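On s390, the branch guarded by have_cpacf_sha256 presumably hands the whole run of blocks to the CPACF KIMD instruction; a sketch under that assumption (the exact call and fallback are not shown in this hunk):

if (static_branch_likely(&have_cpacf_sha256))
        /* KIMD consumes a byte count, so scale by the block size */
        cpacf_kimd(CPACF_KIMD_SHA_256, state, data,
                   nblocks * SHA256_BLOCK_SIZE);
else
        sha256_blocks_generic(state, data, nblocks);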
Generic C implementation:

@@ -70,7 +70,7 @@ static inline void BLEND_OP(int I, u32 *W)
                 h = t1 + t2;                            \
         } while (0)
 
-static void sha256_block_generic(u32 state[SHA256_STATE_WORDS],
+static void sha256_block_generic(struct sha256_block_state *state,
                                  const u8 *input, u32 W[64])
 {
         u32 a, b, c, d, e, f, g, h;
@@ -101,8 +101,14 @@ static void sha256_block_generic(u32 state[SHA256_STATE_WORDS],
         }
 
         /* load the state into our registers */
-        a = state[0]; b = state[1]; c = state[2]; d = state[3];
-        e = state[4]; f = state[5]; g = state[6]; h = state[7];
+        a = state->h[0];
+        b = state->h[1];
+        c = state->h[2];
+        d = state->h[3];
+        e = state->h[4];
+        f = state->h[5];
+        g = state->h[6];
+        h = state->h[7];
 
         /* now iterate */
         for (i = 0; i < 64; i += 8) {
@@ -116,11 +122,17 @@ static void sha256_block_generic(u32 state[SHA256_STATE_WORDS],
                 SHA256_ROUND(i + 7, b, c, d, e, f, g, h, a);
         }
 
-        state[0] += a; state[1] += b; state[2] += c; state[3] += d;
-        state[4] += e; state[5] += f; state[6] += g; state[7] += h;
+        state->h[0] += a;
+        state->h[1] += b;
+        state->h[2] += c;
+        state->h[3] += d;
+        state->h[4] += e;
+        state->h[5] += f;
+        state->h[6] += g;
+        state->h[7] += h;
 }
 
-void sha256_blocks_generic(u32 state[SHA256_STATE_WORDS],
+void sha256_blocks_generic(struct sha256_block_state *state,
                            const u8 *data, size_t nblocks)
 {
         u32 W[64];
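A minimal standalone usage sketch of the retyped generic entry point, assuming the SHA256_H0..SHA256_H7 initial-value constants from <crypto/sha2.h>; the function name is hypothetical:

static void sha256_one_shot_example(const u8 *data, size_t nblocks)
{
        struct sha256_block_state st = {
                .h = { SHA256_H0, SHA256_H1, SHA256_H2, SHA256_H3,
                       SHA256_H4, SHA256_H5, SHA256_H6, SHA256_H7 },
        };

        /* data must be a whole number of 64-byte blocks; padding and
         * finalization are the caller's job at this layer. */
        sha256_blocks_generic(&st, data, nblocks);
}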
SPARC64 glue:

@@ -19,10 +19,10 @@
 
 static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_sha256_opcodes);
 
-asmlinkage void sha256_sparc64_transform(u32 state[SHA256_STATE_WORDS],
+asmlinkage void sha256_sparc64_transform(struct sha256_block_state *state,
                                          const u8 *data, size_t nblocks);
 
-void sha256_blocks_arch(u32 state[SHA256_STATE_WORDS],
+void sha256_blocks_arch(struct sha256_block_state *state,
                         const u8 *data, size_t nblocks)
 {
         if (static_branch_likely(&have_sha256_opcodes))
x86 AVX assembly:

@@ -341,7 +341,7 @@ a = TMP_
 .endm
 
 ########################################################################
-## void sha256_transform_avx(u32 state[SHA256_STATE_WORDS],
+## void sha256_transform_avx(struct sha256_block_state *state,
 ##                           const u8 *data, size_t nblocks);
 ########################################################################
 .text
x86 AVX2/RORX assembly:

@@ -518,7 +518,7 @@ STACK_SIZE = _CTX + _CTX_SIZE
 .endm
 
 ########################################################################
-## void sha256_transform_rorx(u32 state[SHA256_STATE_WORDS],
+## void sha256_transform_rorx(struct sha256_block_state *state,
 ##                            const u8 *data, size_t nblocks);
 ########################################################################
 .text
x86 SHA-NI assembly:

@@ -106,7 +106,7 @@
  * only processes complete blocks. State initialization, buffering of partial
  * blocks, and digest finalization is expected to be handled elsewhere.
  *
- * void sha256_ni_transform(u32 state[SHA256_STATE_WORDS],
+ * void sha256_ni_transform(struct sha256_block_state *state,
  *                          const u8 *data, size_t nblocks);
  */
 .text
x86 SSSE3 assembly:

@@ -348,7 +348,7 @@ a = TMP_
 .endm
 
 ########################################################################
-## void sha256_transform_ssse3(u32 state[SHA256_STATE_WORDS],
+## void sha256_transform_ssse3(struct sha256_block_state *state,
 ##                             const u8 *data, size_t nblocks);
 ########################################################################
 .text
x86 glue:

@@ -11,20 +11,20 @@
 #include <linux/module.h>
 #include <linux/static_call.h>
 
-asmlinkage void sha256_transform_ssse3(u32 state[SHA256_STATE_WORDS],
+asmlinkage void sha256_transform_ssse3(struct sha256_block_state *state,
                                        const u8 *data, size_t nblocks);
-asmlinkage void sha256_transform_avx(u32 state[SHA256_STATE_WORDS],
+asmlinkage void sha256_transform_avx(struct sha256_block_state *state,
                                      const u8 *data, size_t nblocks);
-asmlinkage void sha256_transform_rorx(u32 state[SHA256_STATE_WORDS],
+asmlinkage void sha256_transform_rorx(struct sha256_block_state *state,
                                       const u8 *data, size_t nblocks);
-asmlinkage void sha256_ni_transform(u32 state[SHA256_STATE_WORDS],
+asmlinkage void sha256_ni_transform(struct sha256_block_state *state,
                                     const u8 *data, size_t nblocks);
 
 static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_sha256_x86);
 
 DEFINE_STATIC_CALL(sha256_blocks_x86, sha256_transform_ssse3);
 
-void sha256_blocks_arch(u32 state[SHA256_STATE_WORDS],
+void sha256_blocks_arch(struct sha256_block_state *state,
                         const u8 *data, size_t nblocks)
 {
         if (static_branch_likely(&have_sha256_x86) && crypto_simd_usable()) {
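The x86 glue dispatches through the static call defined above, with the best implementation patched in at init time. A simplified sketch of that selection; the init-function name is hypothetical and the real code also validates XSAVE state for the AVX paths and SSSE3 support before enabling the static key:

static int __init sha256_x86_mod_init(void)     /* hypothetical name */
{
        if (boot_cpu_has(X86_FEATURE_SHA_NI))
                static_call_update(sha256_blocks_x86, sha256_ni_transform);
        else if (boot_cpu_has(X86_FEATURE_AVX2) && boot_cpu_has(X86_FEATURE_BMI2))
                static_call_update(sha256_blocks_x86, sha256_transform_rorx);
        else if (boot_cpu_has(X86_FEATURE_AVX))
                static_call_update(sha256_blocks_x86, sha256_transform_avx);
        /* else keep the SSSE3 default from DEFINE_STATIC_CALL() */
        static_branch_enable(&have_sha256_x86);
        return 0;
}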