2022-12-13 10:58:38

by David Keisar Schm

Subject: [PATCH v2 0/3] Replace invocations of prandom_u32_state, prandom_bytes_state with get_random_u32, get_random_bytes

From: David <[email protected]>

The security improvements made to prandom_u32 (specifically in commits
c51f8f88d705e06bd696d7510aff22b33eb8e638 from October 2020 and
d4150779e60fb6c49be25572596b2cdfc5d46a09 from May 2022) did not cover
the cases where prandom_bytes_state() and prandom_u32_state() are used.
This series adds the changes needed to handle those cases as well.
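
For context, the replacement follows the general pattern sketched below
(illustrative only; "seed" is a placeholder and the actual call sites are
in the patches):

	u32 rand;
	struct rnd_state rand_state;

	/* Before: caller-managed PRNG state (linux/prandom.h),
	 * explicitly seeded; "seed" is a placeholder here.
	 */
	prandom_seed_state(&rand_state, seed);
	rand = prandom_u32_state(&rand_state);

	/* After: get_random_u32() (linux/random.h) draws from the
	 * kernel CRNG, which maintains its own state, so no
	 * per-caller seeding is needed.
	 */
	rand = get_random_u32();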

David (3):
  Replace invocation of weak PRNG in mm/slab.c
  Replace invocation of weak PRNG inside mm/slab_common.c
  Replace invocation of weak PRNG in arch/x86/mm/kaslr.c

 arch/x86/mm/kaslr.c |  5 +----
 mm/slab.c           | 20 ++++++++------------
 mm/slab_common.c    | 10 +++-------
 3 files changed, 12 insertions(+), 23 deletions(-)

--
2.38.0


2022-12-13 11:04:48

by David Keisar Schm

Subject: [PATCH v2 3/3] Replace invocation of weak PRNG in arch/x86/mm/kaslr.c

From: David <[email protected]>

We changed the invocation of prandom_bytes_state, which is considered
weak, to get_random_bytes. We also omitted the call to the seeding
function, since get_random_bytes maintains its own state, so seeding is
no longer needed here. This is important for the randomization of the
initial memory layout.

Signed-off-by: David <[email protected]>
---
arch/x86/mm/kaslr.c | 5 +----
1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/arch/x86/mm/kaslr.c b/arch/x86/mm/kaslr.c
index 0bb083979..9ef8993d5 100644
--- a/arch/x86/mm/kaslr.c
+++ b/arch/x86/mm/kaslr.c
@@ -66,7 +66,6 @@ void __init kernel_randomize_memory(void)
size_t i;
unsigned long vaddr_start, vaddr;
unsigned long rand, memory_tb;
- struct rnd_state rand_state;
unsigned long remain_entropy;
unsigned long vmemmap_size;

@@ -113,8 +112,6 @@ void __init kernel_randomize_memory(void)
for (i = 0; i < ARRAY_SIZE(kaslr_regions); i++)
remain_entropy -= get_padding(&kaslr_regions[i]);

- prandom_seed_state(&rand_state, kaslr_get_random_long("Memory"));
-
for (i = 0; i < ARRAY_SIZE(kaslr_regions); i++) {
unsigned long entropy;

@@ -123,7 +120,7 @@ void __init kernel_randomize_memory(void)
* available.
*/
entropy = remain_entropy / (ARRAY_SIZE(kaslr_regions) - i);
- prandom_bytes_state(&rand_state, &rand, sizeof(rand));
+ get_random_bytes(&rand, sizeof(rand));
entropy = (rand % (entropy + 1)) & PUD_MASK;
vaddr += entropy;
*kaslr_regions[i].base = vaddr;
--
2.38.0

2022-12-13 11:06:50

by David Keisar Schm

Subject: [PATCH v2 1/3] Replace invocation of weak PRNG in mm/slab.c

From: David <[email protected]>

We changed the invocation of prandom_u32_state to get_random_u32. We
also changed freelist_init_state from a union to a struct, since
rnd_state is no longer needed: get_random_u32 maintains its own state.
This change is important because it makes the slab allocator freelist
randomization stronger.

Signed-off-by: David <[email protected]>
---
mm/slab.c | 20 ++++++++------------
1 file changed, 8 insertions(+), 12 deletions(-)

diff --git a/mm/slab.c b/mm/slab.c
index 92d6b1d48..1476104f4 100644
--- a/mm/slab.c
+++ b/mm/slab.c
@@ -2360,20 +2360,17 @@ static void cache_init_objs_debug(struct kmem_cache *cachep, struct slab *slab)

#ifdef CONFIG_SLAB_FREELIST_RANDOM
/* Hold information during a freelist initialization */
-union freelist_init_state {
- struct {
- unsigned int pos;
- unsigned int *list;
- unsigned int count;
- };
- struct rnd_state rnd_state;
+struct freelist_init_state {
+ unsigned int pos;
+ unsigned int *list;
+ unsigned int count;
};

/*
* Initialize the state based on the randomization method available.
* return true if the pre-computed list is available, false otherwise.
*/
-static bool freelist_state_initialize(union freelist_init_state *state,
+static bool freelist_state_initialize(struct freelist_init_state *state,
struct kmem_cache *cachep,
unsigned int count)
{
@@ -2385,7 +2382,6 @@ static bool freelist_state_initialize(union freelist_init_state *state,

/* Use a random state if the pre-computed list is not available */
if (!cachep->random_seq) {
- prandom_seed_state(&state->rnd_state, rand);
ret = false;
} else {
state->list = cachep->random_seq;
@@ -2397,7 +2393,7 @@ static bool freelist_state_initialize(union freelist_init_state *state,
}

/* Get the next entry on the list and randomize it using a random shift */
-static freelist_idx_t next_random_slot(union freelist_init_state *state)
+static freelist_idx_t next_random_slot(struct freelist_init_state *state)
{
if (state->pos >= state->count)
state->pos = 0;
@@ -2418,7 +2414,7 @@ static void swap_free_obj(struct slab *slab, unsigned int a, unsigned int b)
static bool shuffle_freelist(struct kmem_cache *cachep, struct slab *slab)
{
unsigned int objfreelist = 0, i, rand, count = cachep->num;
- union freelist_init_state state;
+ struct freelist_init_state state;
bool precomputed;

if (count < 2)
@@ -2447,7 +2443,7 @@ static bool shuffle_freelist(struct kmem_cache *cachep, struct slab *slab)

/* Fisher-Yates shuffle */
for (i = count - 1; i > 0; i--) {
- rand = prandom_u32_state(&state.rnd_state);
+ rand = get_random_u32();
rand %= (i + 1);
swap_free_obj(slab, i, rand);
}
--
2.38.0

2022-12-13 15:33:46

by Matthew Wilcox

Subject: Re: [PATCH v2 1/3] Replace invocation of weak PRNG in mm/slab.c

On Tue, Dec 13, 2022 at 12:34:57PM +0200, [email protected] wrote:
> From: David <[email protected]>

It's normal to include the surname in your sign-off, fwiw.

> @@ -2447,7 +2443,7 @@ static bool shuffle_freelist(struct kmem_cache *cachep, struct slab *slab)
>
> /* Fisher-Yates shuffle */
> for (i = count - 1; i > 0; i--) {
> - rand = prandom_u32_state(&state.rnd_state);
> + rand = get_random_u32();
> rand %= (i + 1);

Shouldn't this be "rand = get_random_u32_below(i + 1)"?

> swap_free_obj(slab, i, rand);
> }
> --
> 2.38.0
>
>
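
For illustration, if the suggested helper were used, the shuffle loop
above would read (sketch only):

	/* Fisher-Yates shuffle, using the bounded helper */
	for (i = count - 1; i > 0; i--) {
		rand = get_random_u32_below(i + 1);
		swap_free_obj(slab, i, rand);
	}

get_random_u32_below(ceil) returns a uniformly distributed value in
[0, ceil), which avoids the modulo bias of "get_random_u32() % (i + 1)".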