2020-05-02 18:45:30

by Eric Biggers

Subject: [PATCH] crypto: lib/aes - move IRQ disabling into AES library

From: Eric Biggers <[email protected]>

The AES library code (which originally came from crypto/aes_ti.c) is
supposed to be constant-time, to the extent possible for a C
implementation. But the hardening measure of disabling interrupts while
the S-box is loaded into cache was not included in the library version;
it was left only in the crypto API wrapper in crypto/aes_ti.c.

Move this logic into the library version so that everyone gets it.

Fixes: e59c1c987456 ("crypto: aes - create AES library based on the fixed time AES code")
Cc: <[email protected]> # v5.4+
Cc: Ard Biesheuvel <[email protected]>
Signed-off-by: Eric Biggers <[email protected]>
---
crypto/aes_ti.c | 18 ------------------
lib/crypto/aes.c | 18 ++++++++++++++++++
2 files changed, 18 insertions(+), 18 deletions(-)

diff --git a/crypto/aes_ti.c b/crypto/aes_ti.c
index 205c2c257d4926..121f36621d6dcf 100644
--- a/crypto/aes_ti.c
+++ b/crypto/aes_ti.c
@@ -20,33 +20,15 @@ static int aesti_set_key(struct crypto_tfm *tfm, const u8 *in_key,
static void aesti_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
- unsigned long flags;
-
- /*
- * Temporarily disable interrupts to avoid races where cachelines are
- * evicted when the CPU is interrupted to do something else.
- */
- local_irq_save(flags);

aes_encrypt(ctx, out, in);
-
- local_irq_restore(flags);
}

static void aesti_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
- unsigned long flags;
-
- /*
- * Temporarily disable interrupts to avoid races where cachelines are
- * evicted when the CPU is interrupted to do something else.
- */
- local_irq_save(flags);

aes_decrypt(ctx, out, in);
-
- local_irq_restore(flags);
}

static struct crypto_alg aes_alg = {
diff --git a/lib/crypto/aes.c b/lib/crypto/aes.c
index 827fe89922fff0..029d8d0eac1f6e 100644
--- a/lib/crypto/aes.c
+++ b/lib/crypto/aes.c
@@ -260,6 +260,7 @@ void aes_encrypt(const struct crypto_aes_ctx *ctx, u8 *out, const u8 *in)
const u32 *rkp = ctx->key_enc + 4;
int rounds = 6 + ctx->key_length / 4;
u32 st0[4], st1[4];
+ unsigned long flags;
int round;

st0[0] = ctx->key_enc[0] ^ get_unaligned_le32(in);
@@ -267,6 +268,12 @@ void aes_encrypt(const struct crypto_aes_ctx *ctx, u8 *out, const u8 *in)
st0[2] = ctx->key_enc[2] ^ get_unaligned_le32(in + 8);
st0[3] = ctx->key_enc[3] ^ get_unaligned_le32(in + 12);

+ /*
+ * Temporarily disable interrupts to avoid races where cachelines are
+ * evicted when the CPU is interrupted to do something else.
+ */
+ local_irq_save(flags);
+
/*
* Force the compiler to emit data independent Sbox references,
* by xoring the input with Sbox values that are known to add up
@@ -297,6 +304,8 @@ void aes_encrypt(const struct crypto_aes_ctx *ctx, u8 *out, const u8 *in)
put_unaligned_le32(subshift(st1, 1) ^ rkp[5], out + 4);
put_unaligned_le32(subshift(st1, 2) ^ rkp[6], out + 8);
put_unaligned_le32(subshift(st1, 3) ^ rkp[7], out + 12);
+
+ local_irq_restore(flags);
}
EXPORT_SYMBOL(aes_encrypt);

@@ -311,6 +320,7 @@ void aes_decrypt(const struct crypto_aes_ctx *ctx, u8 *out, const u8 *in)
const u32 *rkp = ctx->key_dec + 4;
int rounds = 6 + ctx->key_length / 4;
u32 st0[4], st1[4];
+ unsigned long flags;
int round;

st0[0] = ctx->key_dec[0] ^ get_unaligned_le32(in);
@@ -318,6 +328,12 @@ void aes_decrypt(const struct crypto_aes_ctx *ctx, u8 *out, const u8 *in)
st0[2] = ctx->key_dec[2] ^ get_unaligned_le32(in + 8);
st0[3] = ctx->key_dec[3] ^ get_unaligned_le32(in + 12);

+ /*
+ * Temporarily disable interrupts to avoid races where cachelines are
+ * evicted when the CPU is interrupted to do something else.
+ */
+ local_irq_save(flags);
+
/*
* Force the compiler to emit data independent Sbox references,
* by xoring the input with Sbox values that are known to add up
@@ -348,6 +364,8 @@ void aes_decrypt(const struct crypto_aes_ctx *ctx, u8 *out, const u8 *in)
put_unaligned_le32(inv_subshift(st1, 1) ^ rkp[5], out + 4);
put_unaligned_le32(inv_subshift(st1, 2) ^ rkp[6], out + 8);
put_unaligned_le32(inv_subshift(st1, 3) ^ rkp[7], out + 12);
+
+ local_irq_restore(flags);
}
EXPORT_SYMBOL(aes_decrypt);

--
2.26.2
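
For illustration only, a minimal sketch (not part of the patch) of how a
kernel-side caller might use the library once this lands. The helper
example_encrypt_block() is hypothetical; aes_expandkey(), aes_encrypt()
and AES_BLOCK_SIZE are the existing library interfaces touched here:

#include <crypto/aes.h>
#include <linux/string.h>

/* Hypothetical helper: encrypt a single block with a raw key. */
static int example_encrypt_block(const u8 *key, unsigned int key_len,
                                 u8 out[AES_BLOCK_SIZE],
                                 const u8 in[AES_BLOCK_SIZE])
{
        struct crypto_aes_ctx ctx;
        int err;

        err = aes_expandkey(&ctx, key, key_len);
        if (err)
                return err;

        /*
         * With this patch, interrupts are disabled inside aes_encrypt()
         * while the S-box is being referenced, so the caller no longer
         * has to manage IRQ state itself.
         */
        aes_encrypt(&ctx, out, in);

        memzero_explicit(&ctx, sizeof(ctx));
        return 0;
}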


2020-05-02 18:56:37

by Ard Biesheuvel

Subject: Re: [PATCH] crypto: lib/aes - move IRQ disabling into AES library

On Sat, 2 May 2020 at 20:44, Eric Biggers <[email protected]> wrote:
>
> From: Eric Biggers <[email protected]>
>
> The AES library code (which originally came from crypto/aes_ti.c) is
> supposed to be constant-time, to the extent possible for a C
> implementation. But the hardening measure of disabling interrupts while
> the S-box is loaded into cache was not included in the library version;
> it was left only in the crypto API wrapper in crypto/aes_ti.c.
>
> Move this logic into the library version so that everyone gets it.
>

I don't think we should fiddle with interrupts in a general-purpose
crypto library.

We /could/ add a variant aes_encrypt_irq_off() if you really want, but
this is not something callers should get without asking for it
explicitly, IMO.
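
A minimal sketch of the kind of variant being suggested (hypothetical;
nothing like this exists in the tree): the IRQ handling would live in an
explicitly named wrapper, while aes_encrypt() itself stays untouched:

#include <crypto/aes.h>
#include <linux/irqflags.h>

/*
 * Hypothetical opt-in variant: same as aes_encrypt(), but with interrupts
 * disabled so the S-box cachelines cannot be evicted mid-computation.
 */
static inline void aes_encrypt_irq_off(const struct crypto_aes_ctx *ctx,
                                       u8 *out, const u8 *in)
{
        unsigned long flags;

        local_irq_save(flags);
        aes_encrypt(ctx, out, in);
        local_irq_restore(flags);
}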


