2023-01-10 16:06:02

by Vernon Yang

Subject: [PATCH] maple_tree: remove the parameter entry of mas_preallocate

The 'entry' parameter of mas_preallocate() is unused, so drop it.

Signed-off-by: Vernon Yang <[email protected]>
---
include/linux/maple_tree.h | 2 +-
lib/maple_tree.c | 3 +--
mm/mmap.c | 16 ++++++++--------
mm/nommu.c | 8 ++++----
tools/testing/radix-tree/maple.c | 32 ++++++++++++++++----------------
5 files changed, 30 insertions(+), 31 deletions(-)

diff --git a/include/linux/maple_tree.h b/include/linux/maple_tree.h
index e594db58a0f1..a0d43087f27a 100644
--- a/include/linux/maple_tree.h
+++ b/include/linux/maple_tree.h
@@ -456,7 +456,7 @@ int mas_store_gfp(struct ma_state *mas, void *entry, gfp_t gfp);
void mas_store_prealloc(struct ma_state *mas, void *entry);
void *mas_find(struct ma_state *mas, unsigned long max);
void *mas_find_rev(struct ma_state *mas, unsigned long min);
-int mas_preallocate(struct ma_state *mas, void *entry, gfp_t gfp);
+int mas_preallocate(struct ma_state *mas, gfp_t gfp);
bool mas_is_err(struct ma_state *mas);

bool mas_nomem(struct ma_state *mas, gfp_t gfp);
diff --git a/lib/maple_tree.c b/lib/maple_tree.c
index 69be9d3db0c8..96fb4b416697 100644
--- a/lib/maple_tree.c
+++ b/lib/maple_tree.c
@@ -5712,12 +5712,11 @@ EXPORT_SYMBOL_GPL(mas_store_prealloc);
/**
* mas_preallocate() - Preallocate enough nodes for a store operation
* @mas: The maple state
- * @entry: The entry that will be stored
* @gfp: The GFP_FLAGS to use for allocations.
*
* Return: 0 on success, -ENOMEM if memory could not be allocated.
*/
-int mas_preallocate(struct ma_state *mas, void *entry, gfp_t gfp)
+int mas_preallocate(struct ma_state *mas, gfp_t gfp)
{
int ret;

diff --git a/mm/mmap.c b/mm/mmap.c
index e06f9ae34ff8..64bdd38e8d8e 100644
--- a/mm/mmap.c
+++ b/mm/mmap.c
@@ -472,7 +472,7 @@ static int vma_link(struct mm_struct *mm, struct vm_area_struct *vma)
MA_STATE(mas, &mm->mm_mt, 0, 0);
struct address_space *mapping = NULL;

- if (mas_preallocate(&mas, vma, GFP_KERNEL))
+ if (mas_preallocate(&mas, GFP_KERNEL))
return -ENOMEM;

if (vma->vm_file) {
@@ -538,7 +538,7 @@ inline int vma_expand(struct ma_state *mas, struct vm_area_struct *vma,
/* Only handles expanding */
VM_BUG_ON(vma->vm_start < start || vma->vm_end > end);

- if (mas_preallocate(mas, vma, GFP_KERNEL))
+ if (mas_preallocate(mas, GFP_KERNEL))
goto nomem;

vma_adjust_trans_huge(vma, start, end, 0);
@@ -712,7 +712,7 @@ int __vma_adjust(struct vm_area_struct *vma, unsigned long start,
}
}

- if (mas_preallocate(&mas, vma, GFP_KERNEL))
+ if (mas_preallocate(&mas, GFP_KERNEL))
return -ENOMEM;

vma_adjust_trans_huge(orig_vma, start, end, adjust_next);
@@ -1934,7 +1934,7 @@ int expand_upwards(struct vm_area_struct *vma, unsigned long address)
/* Check that both stack segments have the same anon_vma? */
}

- if (mas_preallocate(&mas, vma, GFP_KERNEL))
+ if (mas_preallocate(&mas, GFP_KERNEL))
return -ENOMEM;

/* We must make sure the anon_vma is allocated. */
@@ -2015,7 +2015,7 @@ int expand_downwards(struct vm_area_struct *vma, unsigned long address)
return -ENOMEM;
}

- if (mas_preallocate(&mas, vma, GFP_KERNEL))
+ if (mas_preallocate(&mas, GFP_KERNEL))
return -ENOMEM;

/* We must make sure the anon_vma is allocated. */
@@ -2307,7 +2307,7 @@ do_mas_align_munmap(struct ma_state *mas, struct vm_area_struct *vma,
mt_init_flags(&mt_detach, MT_FLAGS_LOCK_EXTERN);
mt_set_external_lock(&mt_detach, &mm->mmap_lock);

- if (mas_preallocate(mas, vma, GFP_KERNEL))
+ if (mas_preallocate(mas, GFP_KERNEL))
return -ENOMEM;

mas->last = end - 1;
@@ -2676,7 +2676,7 @@ unsigned long mmap_region(struct file *file, unsigned long addr,
goto free_vma;
}

- if (mas_preallocate(&mas, vma, GFP_KERNEL)) {
+ if (mas_preallocate(&mas, GFP_KERNEL)) {
error = -ENOMEM;
if (file)
goto close_and_free_vma;
@@ -2949,7 +2949,7 @@ static int do_brk_flags(struct ma_state *mas, struct vm_area_struct *vma,
can_vma_merge_after(vma, flags, NULL, NULL,
addr >> PAGE_SHIFT, NULL_VM_UFFD_CTX, NULL)) {
mas_set_range(mas, vma->vm_start, addr + len - 1);
- if (mas_preallocate(mas, vma, GFP_KERNEL))
+ if (mas_preallocate(mas, GFP_KERNEL))
goto unacct_fail;

vma_adjust_trans_huge(vma, vma->vm_start, addr + len, 0);
diff --git a/mm/nommu.c b/mm/nommu.c
index 214c70e1d059..0befa4060aea 100644
--- a/mm/nommu.c
+++ b/mm/nommu.c
@@ -602,7 +602,7 @@ static int add_vma_to_mm(struct mm_struct *mm, struct vm_area_struct *vma)
{
MA_STATE(mas, &mm->mm_mt, vma->vm_start, vma->vm_end);

- if (mas_preallocate(&mas, vma, GFP_KERNEL)) {
+ if (mas_preallocate(&mas, GFP_KERNEL)) {
pr_warn("Allocation of vma tree for process %d failed\n",
current->pid);
return -ENOMEM;
@@ -633,7 +633,7 @@ static int delete_vma_from_mm(struct vm_area_struct *vma)
{
MA_STATE(mas, &vma->vm_mm->mm_mt, 0, 0);

- if (mas_preallocate(&mas, vma, GFP_KERNEL)) {
+ if (mas_preallocate(&mas, GFP_KERNEL)) {
pr_warn("Allocation of vma tree for process %d failed\n",
current->pid);
return -ENOMEM;
@@ -1081,7 +1081,7 @@ unsigned long do_mmap(struct file *file,
if (!vma)
goto error_getting_vma;

- if (mas_preallocate(&mas, vma, GFP_KERNEL))
+ if (mas_preallocate(&mas, GFP_KERNEL))
goto error_maple_preallocate;

region->vm_usage = 1;
@@ -1358,7 +1358,7 @@ int split_vma(struct mm_struct *mm, struct vm_area_struct *vma,
if (!new)
goto err_vma_dup;

- if (mas_preallocate(&mas, vma, GFP_KERNEL)) {
+ if (mas_preallocate(&mas, GFP_KERNEL)) {
pr_warn("Allocation of vma tree for process %d failed\n",
current->pid);
goto err_mas_preallocate;
diff --git a/tools/testing/radix-tree/maple.c b/tools/testing/radix-tree/maple.c
index 81fa7ec2e66a..8170ef39d8c4 100644
--- a/tools/testing/radix-tree/maple.c
+++ b/tools/testing/radix-tree/maple.c
@@ -35342,7 +35342,7 @@ static noinline void check_prealloc(struct maple_tree *mt)
for (i = 0; i <= max; i++)
mtree_test_store_range(mt, i * 10, i * 10 + 5, &i);

- MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
+ MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0);
allocated = mas_allocated(&mas);
height = mas_mt_height(&mas);
MT_BUG_ON(mt, allocated == 0);
@@ -35351,18 +35351,18 @@ static noinline void check_prealloc(struct maple_tree *mt)
allocated = mas_allocated(&mas);
MT_BUG_ON(mt, allocated != 0);

- MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
+ MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0);
allocated = mas_allocated(&mas);
height = mas_mt_height(&mas);
MT_BUG_ON(mt, allocated == 0);
MT_BUG_ON(mt, allocated != 1 + height * 3);
- MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
+ MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0);
mas_destroy(&mas);
allocated = mas_allocated(&mas);
MT_BUG_ON(mt, allocated != 0);


- MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
+ MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0);
allocated = mas_allocated(&mas);
height = mas_mt_height(&mas);
MT_BUG_ON(mt, allocated == 0);
@@ -35370,25 +35370,25 @@ static noinline void check_prealloc(struct maple_tree *mt)
mn = mas_pop_node(&mas);
MT_BUG_ON(mt, mas_allocated(&mas) != allocated - 1);
ma_free_rcu(mn);
- MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
+ MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0);
mas_destroy(&mas);
allocated = mas_allocated(&mas);
MT_BUG_ON(mt, allocated != 0);

- MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
+ MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0);
allocated = mas_allocated(&mas);
height = mas_mt_height(&mas);
MT_BUG_ON(mt, allocated == 0);
MT_BUG_ON(mt, allocated != 1 + height * 3);
mn = mas_pop_node(&mas);
MT_BUG_ON(mt, mas_allocated(&mas) != allocated - 1);
- MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
+ MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0);
mas_destroy(&mas);
allocated = mas_allocated(&mas);
MT_BUG_ON(mt, allocated != 0);
ma_free_rcu(mn);

- MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
+ MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0);
allocated = mas_allocated(&mas);
height = mas_mt_height(&mas);
MT_BUG_ON(mt, allocated == 0);
@@ -35397,12 +35397,12 @@ static noinline void check_prealloc(struct maple_tree *mt)
MT_BUG_ON(mt, mas_allocated(&mas) != allocated - 1);
mas_push_node(&mas, mn);
MT_BUG_ON(mt, mas_allocated(&mas) != allocated);
- MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
+ MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0);
mas_destroy(&mas);
allocated = mas_allocated(&mas);
MT_BUG_ON(mt, allocated != 0);

- MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
+ MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0);
allocated = mas_allocated(&mas);
height = mas_mt_height(&mas);
MT_BUG_ON(mt, allocated == 0);
@@ -35410,21 +35410,21 @@ static noinline void check_prealloc(struct maple_tree *mt)
mas_store_prealloc(&mas, ptr);
MT_BUG_ON(mt, mas_allocated(&mas) != 0);

- MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
+ MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0);
allocated = mas_allocated(&mas);
height = mas_mt_height(&mas);
MT_BUG_ON(mt, allocated == 0);
MT_BUG_ON(mt, allocated != 1 + height * 3);
mas_store_prealloc(&mas, ptr);
MT_BUG_ON(mt, mas_allocated(&mas) != 0);
- MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
+ MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0);
allocated = mas_allocated(&mas);
height = mas_mt_height(&mas);
MT_BUG_ON(mt, allocated == 0);
MT_BUG_ON(mt, allocated != 1 + height * 3);
mas_store_prealloc(&mas, ptr);

- MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
+ MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0);
allocated = mas_allocated(&mas);
height = mas_mt_height(&mas);
MT_BUG_ON(mt, allocated == 0);
@@ -35432,14 +35432,14 @@ static noinline void check_prealloc(struct maple_tree *mt)
mas_store_prealloc(&mas, ptr);
MT_BUG_ON(mt, mas_allocated(&mas) != 0);
mt_set_non_kernel(1);
- MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL & GFP_NOWAIT) == 0);
+ MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL & GFP_NOWAIT) == 0);
allocated = mas_allocated(&mas);
height = mas_mt_height(&mas);
MT_BUG_ON(mt, allocated != 0);
mas_destroy(&mas);


- MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
+ MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0);
allocated = mas_allocated(&mas);
height = mas_mt_height(&mas);
MT_BUG_ON(mt, allocated == 0);
@@ -35447,7 +35447,7 @@ static noinline void check_prealloc(struct maple_tree *mt)
mas_store_prealloc(&mas, ptr);
MT_BUG_ON(mt, mas_allocated(&mas) != 0);
mt_set_non_kernel(1);
- MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL & GFP_NOWAIT) == 0);
+ MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL & GFP_NOWAIT) == 0);
allocated = mas_allocated(&mas);
height = mas_mt_height(&mas);
MT_BUG_ON(mt, allocated != 0);
--
2.34.1


2023-03-20 19:34:21

by Liam R. Howlett

Subject: Re: [PATCH] maple_tree: remove the parameter entry of mas_preallocate

* Vernon Yang <[email protected]> [230110 10:42]:
> The 'entry' parameter of mas_preallocate() is unused, so drop it.

This parameter was meant to reduce the allocations needed to store a
given value. Since NULLs behave differently than actual values (NULLs
are combined), the value being stored was going to be used.

Since the fix that removed GFP_ZERO from the maple tree node allocator [1],
this may no longer be a worthwhile optimization.

[1] https://lore.kernel.org/all/[email protected]/
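
For illustration only (this sketch is not part of the patch; the tree,
range, and stored value are placeholders), the usual preallocate/store
pattern with the new signature would look roughly like:

	/* illustrative sketch only; tree, range and value are placeholders */
	MA_STATE(mas, &mm->mm_mt, vma->vm_start, vma->vm_end - 1);

	if (mas_preallocate(&mas, GFP_KERNEL))	/* no entry argument anymore */
		return -ENOMEM;			/* nothing stored yet, nothing to undo */

	/* ... work that must not fail once we commit the store ... */

	mas_store_prealloc(&mas, vma);		/* store using the preallocated nodes */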


Acked-by: Liam R. Howlett <[email protected]>
