2023-04-15 17:38:35

by David Keisar Schmidt

[permalink] [raw]
Subject: [PATCH v5 1/3] Replace invocation of weak PRNG

From: David Keisar Schmidt <[email protected]>

The Slab allocator randomization uses the prandom_u32
PRNG. That was added to prevent attackers from obtaining information on the heap
state, by randomizing the freelist state.

However, this PRNG turned out to be weak, as noted in commit c51f8f88d705.
To fix it, we have changed the invocation of prandom_u32_state to get_random_u32
to ensure the PRNG is strong. Since a modulo operation is applied right after that,
we used get_random_u32_below to achieve uniformity.

In addition, we changed the freelist_init_state union to a struct,
since the rnd_state inside it, which was used to store the state of prandom_u32,
is no longer needed: get_random_u32 maintains its own state.

Signed-off-by: David Keisar Schmidt <[email protected]>
---
This fifth series changes only the arch/x86/mm/kaslr patch.

Changes since v3:

* edited commit message.

Changes since v2:

* replaced instances of get_random_u32 with get_random_u32_below
in mm/slab.c.

mm/slab.c | 25 ++++++++++---------------
1 file changed, 10 insertions(+), 15 deletions(-)

diff --git a/mm/slab.c b/mm/slab.c
index edbe722fb..7c8bb4a8f 100644
--- a/mm/slab.c
+++ b/mm/slab.c
@@ -2360,20 +2360,17 @@ static void cache_init_objs_debug(struct kmem_cache *cachep, struct slab *slab)

#ifdef CONFIG_SLAB_FREELIST_RANDOM
/* Hold information during a freelist initialization */
-union freelist_init_state {
- struct {
- unsigned int pos;
- unsigned int *list;
- unsigned int count;
- };
- struct rnd_state rnd_state;
+struct freelist_init_state {
+ unsigned int pos;
+ unsigned int *list;
+ unsigned int count;
};

/*
* Initialize the state based on the randomization method available.
* return true if the pre-computed list is available, false otherwise.
*/
-static bool freelist_state_initialize(union freelist_init_state *state,
+static bool freelist_state_initialize(struct freelist_init_state *state,
struct kmem_cache *cachep,
unsigned int count)
{
@@ -2381,23 +2378,22 @@ static bool freelist_state_initialize(union freelist_init_state *state,
unsigned int rand;

/* Use best entropy available to define a random shift */
- rand = get_random_u32();
+ rand = get_random_u32_below(count);

/* Use a random state if the pre-computed list is not available */
if (!cachep->random_seq) {
- prandom_seed_state(&state->rnd_state, rand);
ret = false;
} else {
state->list = cachep->random_seq;
state->count = count;
- state->pos = rand % count;
+ state->pos = rand;
ret = true;
}
return ret;
}

/* Get the next entry on the list and randomize it using a random shift */
-static freelist_idx_t next_random_slot(union freelist_init_state *state)
+static freelist_idx_t next_random_slot(struct freelist_init_state *state)
{
if (state->pos >= state->count)
state->pos = 0;
@@ -2418,7 +2414,7 @@ static void swap_free_obj(struct slab *slab, unsigned int a, unsigned int b)
static bool shuffle_freelist(struct kmem_cache *cachep, struct slab *slab)
{
unsigned int objfreelist = 0, i, rand, count = cachep->num;
- union freelist_init_state state;
+ struct freelist_init_state state;
bool precomputed;

if (count < 2)
@@ -2447,8 +2443,7 @@ static bool shuffle_freelist(struct kmem_cache *cachep, struct slab *slab)

/* Fisher-Yates shuffle */
for (i = count - 1; i > 0; i--) {
- rand = prandom_u32_state(&state.rnd_state);
- rand %= (i + 1);
+ rand = get_random_u32_below(i+1);
swap_free_obj(slab, i, rand);
}
} else {
--
2.37.3


2023-04-16 11:55:03

by Hyeonggon Yoo

[permalink] [raw]
Subject: Re: [PATCH v5 1/3] Replace invocation of weak PRNG

On Sat, Apr 15, 2023 at 08:36:32PM +0300, [email protected] wrote:
> From: David Keisar Schmidt <[email protected]>
>
> The Slab allocator randomization uses the prandom_u32
> PRNG. That was added to prevent attackers to obtain information on the heap
> state, by randomizing the freelists state.
>
> However, this PRNG turned out to be weak, as noted in commit c51f8f88d705
> To fix it, we have changed the invocation of prandom_u32_state to get_random_u32
> to ensure the PRNG is strong. Since a modulo operation is applied right after that,
> we used get_random_u32_below, to achieve uniformity.
>
> In addition, we changed the freelist_init_state union to struct,
> since the rnd_state inside which is used to store the state of prandom_u32,
> is not needed anymore, since get_random_u32 maintains its own state.

makes sense to me, but some nits:

I think the subject should start with "mm/slab:" for this patch
and "mm/slab_common:" for the next patch.

>
> Signed-off-by: David Keisar Schmidt <[email protected]>
> ---
> This fifth series changes only the arch/x86/mm/kaslr patch.
>
> Changes since v3:
>
> * edited commit message.
>
> Changes since v2:
>
> * replaced instances of get_random_u32 with get_random_u32_below
> in mm/slab.c.
>
> mm/slab.c | 25 ++++++++++---------------
> 1 file changed, 10 insertions(+), 15 deletions(-)
>
> diff --git a/mm/slab.c b/mm/slab.c
> index edbe722fb..7c8bb4a8f 100644
> --- a/mm/slab.c
> +++ b/mm/slab.c
> @@ -2360,20 +2360,17 @@ static void cache_init_objs_debug(struct kmem_cache *cachep, struct slab *slab)
>
> #ifdef CONFIG_SLAB_FREELIST_RANDOM
> /* Hold information during a freelist initialization */
> -union freelist_init_state {
> - struct {
> - unsigned int pos;
> - unsigned int *list;
> - unsigned int count;
> - };
> - struct rnd_state rnd_state;
> +struct freelist_init_state {
> + unsigned int pos;
> + unsigned int *list;
> + unsigned int count;
> };
>
> /*
> * Initialize the state based on the randomization method available.
> * return true if the pre-computed list is available, false otherwise.
> */
> -static bool freelist_state_initialize(union freelist_init_state *state,
> +static bool freelist_state_initialize(struct freelist_init_state *state,
> struct kmem_cache *cachep,
> unsigned int count)
> {
> @@ -2381,23 +2378,22 @@ static bool freelist_state_initialize(union freelist_init_state *state,
> unsigned int rand;
>
> /* Use best entropy available to define a random shift */
> - rand = get_random_u32();
> + rand = get_random_u32_below(count);
>

> /* Use a random state if the pre-computed list is not available */

This comment should be removed too.

> if (!cachep->random_seq) {
> - prandom_seed_state(&state->rnd_state, rand);
> ret = false;
> } else {
> state->list = cachep->random_seq;
> state->count = count;
> - state->pos = rand % count;
> + state->pos = rand;

this could be:
state->pos = get_random_u32_below(count);

without defining the variable rand.

> ret = true;
> }
> return ret;
> }
>
> /* Get the next entry on the list and randomize it using a random shift */
> -static freelist_idx_t next_random_slot(union freelist_init_state *state)
> +static freelist_idx_t next_random_slot(struct freelist_init_state *state)
> {
> if (state->pos >= state->count)
> state->pos = 0;
> @@ -2418,7 +2414,7 @@ static void swap_free_obj(struct slab *slab, unsigned int a, unsigned int b)
> static bool shuffle_freelist(struct kmem_cache *cachep, struct slab *slab)
> {
> unsigned int objfreelist = 0, i, rand, count = cachep->num;
> - union freelist_init_state state;
> + struct freelist_init_state state;
> bool precomputed;
>
> if (count < 2)
> @@ -2447,8 +2443,7 @@ static bool shuffle_freelist(struct kmem_cache *cachep, struct slab *slab)
>
> /* Fisher-Yates shuffle */
> for (i = count - 1; i > 0; i--) {
> - rand = prandom_u32_state(&state.rnd_state);
> - rand %= (i + 1);
> + rand = get_random_u32_below(i+1);

per the coding standard this should be

rand = get_random_u32_below(i + 1);

> swap_free_obj(slab, i, rand);
> }
> } else {
> --
> 2.37.3
>