On Wed, 19 Jan 2022 at 13:15, Jason A. Donenfeld <[email protected]> wrote:
>
> The below kludge of a patch fixes the issue. Still unclear whether we
> should go with something like this or get clang fixed or what.
>
> diff --git a/arch/arm/crypto/blake2s-shash.c b/arch/arm/crypto/blake2s-shash.c
> index 17c1c3bfe2f5..be8cde5f1719 100644
> --- a/arch/arm/crypto/blake2s-shash.c
> +++ b/arch/arm/crypto/blake2s-shash.c
> @@ -13,12 +13,12 @@
> static int crypto_blake2s_update_arm(struct shash_desc *desc,
> const u8 *in, unsigned int inlen)
> {
> - return crypto_blake2s_update(desc, in, inlen, blake2s_compress);
> + return crypto_blake2s_update(desc, in, inlen);
> }
>
> static int crypto_blake2s_final_arm(struct shash_desc *desc, u8 *out)
> {
> - return crypto_blake2s_final(desc, out, blake2s_compress);
> + return crypto_blake2s_final(desc, out);
> }
>
> #define BLAKE2S_ALG(name, driver_name, digest_size) \
> diff --git a/arch/x86/crypto/blake2s-shash.c b/arch/x86/crypto/blake2s-shash.c
> index f9e2fecdb761..c81ffedb4865 100644
> --- a/arch/x86/crypto/blake2s-shash.c
> +++ b/arch/x86/crypto/blake2s-shash.c
> @@ -18,12 +18,12 @@
> static int crypto_blake2s_update_x86(struct shash_desc *desc,
> const u8 *in, unsigned int inlen)
> {
> - return crypto_blake2s_update(desc, in, inlen, blake2s_compress);
> + return crypto_blake2s_update(desc, in, inlen);
> }
>
> static int crypto_blake2s_final_x86(struct shash_desc *desc, u8 *out)
> {
> - return crypto_blake2s_final(desc, out, blake2s_compress);
> + return crypto_blake2s_final(desc, out);
> }
>
> #define BLAKE2S_ALG(name, driver_name, digest_size) \
> diff --git a/crypto/blake2s_generic.c b/crypto/blake2s_generic.c
> index 72fe480f9bd6..050874588a84 100644
> --- a/crypto/blake2s_generic.c
> +++ b/crypto/blake2s_generic.c
> @@ -5,6 +5,7 @@
> * Copyright (C) 2015-2019 Jason A. Donenfeld <[email protected]>. All Rights Reserved.
> */
>
> +#define FORCE_BLAKE2S_GENERIC
> #include <crypto/internal/blake2s.h>
> #include <crypto/internal/hash.h>
>
I'd prefer it if we could avoid magic #defines like this. We could
fix it up locally in crypto/internal/blake2s.h just by doing something
like the below.

diff --git a/include/crypto/internal/blake2s.h b/include/crypto/internal/blake2s.h
index d39cfa0d333e..9e52c07c54cc 100644
--- a/include/crypto/internal/blake2s.h
+++ b/include/crypto/internal/blake2s.h
@@ -39,7 +39,11 @@ static inline void __blake2s_update(struct blake2s_state *state,
return;
if (inlen > fill) {
memcpy(state->buf + state->buflen, in, fill);
- (*compress)(state, state->buf, 1, BLAKE2S_BLOCK_SIZE);
+ if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_BLAKE2S))
+ (*compress)(state, state->buf, 1, BLAKE2S_BLOCK_SIZE);
+ else
+ blake2s_compress_generic(state, state->buf, 1,
+ BLAKE2S_BLOCK_SIZE);
state->buflen = 0;
in += fill;
inlen -= fill;
@@ -47,7 +51,11 @@ static inline void __blake2s_update(struct blake2s_state *state,
if (inlen > BLAKE2S_BLOCK_SIZE) {
const size_t nblocks = DIV_ROUND_UP(inlen, BLAKE2S_BLOCK_SIZE);
/* Hash one less (full) block than strictly possible */
- (*compress)(state, in, nblocks - 1, BLAKE2S_BLOCK_SIZE);
+ if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_BLAKE2S))
+ (*compress)(state, in, nblocks - 1, BLAKE2S_BLOCK_SIZE);
+ else
+ blake2s_compress_generic(state, in, nblocks - 1,
+ BLAKE2S_BLOCK_SIZE);
in += BLAKE2S_BLOCK_SIZE * (nblocks - 1);
inlen -= BLAKE2S_BLOCK_SIZE * (nblocks - 1);
}
@@ -61,7 +69,10 @@ static inline void __blake2s_final(struct blake2s_state *state, u8 *out,
blake2s_set_lastblock(state);
memset(state->buf + state->buflen, 0,
BLAKE2S_BLOCK_SIZE - state->buflen); /* Padding */
- (*compress)(state, state->buf, 1, state->buflen);
+ if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_BLAKE2S))
+ (*compress)(state, state->buf, 1, state->buflen);
+ else
+ blake2s_compress_generic(state, state->buf, 1, state->buflen);
cpu_to_le32_array(state->h, ARRAY_SIZE(state->h));
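
For illustration only, a standalone sketch (not kernel code) of why the
extra branch costs nothing at runtime, using a hypothetical
HAVE_ARCH_COMPRESS macro in place of CONFIG_CRYPTO_ARCH_HAVE_LIB_BLAKE2S:
IS_ENABLED() expands to a compile-time constant, so the compiler should
keep only one side of the if and drop the other branch, including the
indirect call through *compress, as dead code.

#include <stdio.h>

#define HAVE_ARCH_COMPRESS 0	/* stand-in for CONFIG_CRYPTO_ARCH_HAVE_LIB_BLAKE2S */

typedef void (*compress_fn)(const char *what);

static void compress_generic(const char *what)
{
	printf("generic compress: %s\n", what);
}

static void do_update(compress_fn compress, const char *what)
{
	/* Constant condition: only one branch survives dead-code elimination. */
	if (HAVE_ARCH_COMPRESS)
		compress(what);		/* arch-optimised path, indirect call */
	else
		compress_generic(what);	/* direct call into the generic code */
}

int main(void)
{
	/* With HAVE_ARCH_COMPRESS == 0 the function pointer argument goes unused. */
	do_update(compress_generic, "one block");
	return 0;
}

That keeps the choice of compression routine local to the header,
without needing a FORCE_BLAKE2S_GENERIC define in the generic shash
glue.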