2019-08-11 18:47:13

by Uladzislau Rezki

[permalink] [raw]
Subject: [PATCH 0/2] some cleanups related to RB_DECLARE_CALLBACKS_MAX

Recently we have got the RB_DECLARE_CALLBACKS_MAX template, which is supposed
to be used when the augmented value is a scalar. The first
patch just simplifies the *_compute_max() callback by using the max3()
macro, which makes the code more transparent, I think. No functional changes.

The second patch reuses the RB_DECLARE_CALLBACKS_MAX template's internal
functionality, which is generated to manage an augmented red-black tree, instead
of using our own identical logic in vmalloc. Just get rid of duplication. No functional changes.

Also I have an open question related to validation of the augmented tree, I mean
in case of debugging, to check that nodes are maintained correctly. Please
have a look here: https://lkml.org/lkml/2019/7/29/304

Basically we can add one more function under the RB_DECLARE_CALLBACKS_MAX template,
making it public, that checks a tree and its augmented nodes. At least I see
two users where it can be used: vmalloc and lib/rbtree_test.c.

I would appreciate any comments.

Uladzislau Rezki (Sony) (2):
augmented rbtree: use max3() in the *_compute_max() function
mm/vmalloc: use generated callback to populate subtree_max_size

include/linux/rbtree_augmented.h | 40 +++++++++++++++++-----------------
mm/vmalloc.c | 31 +-------------------------
tools/include/linux/rbtree_augmented.h | 40 +++++++++++++++++-----------------
3 files changed, 41 insertions(+), 70 deletions(-)

--
2.11.0


2019-08-11 18:47:40

by Uladzislau Rezki

[permalink] [raw]
Subject: [PATCH 2/2] mm/vmalloc: use generated callback to populate subtree_max_size

RB_DECLARE_CALLBACKS_MAX defines its own callback to update the
augmented subtree information after a node is modified. It makes
sense to use it instead of our own propagate implementation.

Apart from that, in case of using the generated callback we can eliminate
the compute_subtree_max_size() function and get rid of duplication.

Signed-off-by: Uladzislau Rezki (Sony) <[email protected]>
---
mm/vmalloc.c | 31 +------------------------------
1 file changed, 1 insertion(+), 30 deletions(-)

diff --git a/mm/vmalloc.c b/mm/vmalloc.c
index b8101030f79e..e03444598ae1 100644
--- a/mm/vmalloc.c
+++ b/mm/vmalloc.c
@@ -385,17 +385,6 @@ get_subtree_max_size(struct rb_node *node)
return va ? va->subtree_max_size : 0;
}

-/*
- * Gets called when remove the node and rotate.
- */
-static __always_inline unsigned long
-compute_subtree_max_size(struct vmap_area *va)
-{
- return max3(va_size(va),
- get_subtree_max_size(va->rb_node.rb_left),
- get_subtree_max_size(va->rb_node.rb_right));
-}
-
RB_DECLARE_CALLBACKS_MAX(static, free_vmap_area_rb_augment_cb,
struct vmap_area, rb_node, unsigned long, subtree_max_size, va_size)

@@ -623,25 +612,7 @@ augment_tree_propagate_check(struct rb_node *n)
static __always_inline void
augment_tree_propagate_from(struct vmap_area *va)
{
- struct rb_node *node = &va->rb_node;
- unsigned long new_va_sub_max_size;
-
- while (node) {
- va = rb_entry(node, struct vmap_area, rb_node);
- new_va_sub_max_size = compute_subtree_max_size(va);
-
- /*
- * If the newly calculated maximum available size of the
- * subtree is equal to the current one, then it means that
- * the tree is propagated correctly. So we have to stop at
- * this point to save cycles.
- */
- if (va->subtree_max_size == new_va_sub_max_size)
- break;
-
- va->subtree_max_size = new_va_sub_max_size;
- node = rb_parent(&va->rb_node);
- }
+ free_vmap_area_rb_augment_cb_propagate(&va->rb_node, NULL);

#if DEBUG_AUGMENT_PROPAGATE_CHECK
augment_tree_propagate_check(free_vmap_area_root.rb_node);
--
2.11.0

2019-08-11 18:48:24

by Uladzislau Rezki

[permalink] [raw]
Subject: [PATCH 1/2] augmented rbtree: use max3() in the *_compute_max() function

Recently the RB_DECLARE_CALLBACKS_MAX template was introduced.
One of the callbacks, to be more specific *_compute_max(), calculates
the maximum scalar value of a node against its left/right sub-trees.

To simplify the code and improve readability we can switch and
make use of the max3() macro, which makes the code more transparent.

Signed-off-by: Uladzislau Rezki (Sony) <[email protected]>
---
include/linux/rbtree_augmented.h | 40 +++++++++++++++++-----------------
tools/include/linux/rbtree_augmented.h | 40 +++++++++++++++++-----------------
2 files changed, 40 insertions(+), 40 deletions(-)

diff --git a/include/linux/rbtree_augmented.h b/include/linux/rbtree_augmented.h
index fdd421b8d9ae..fb29d6627646 100644
--- a/include/linux/rbtree_augmented.h
+++ b/include/linux/rbtree_augmented.h
@@ -119,26 +119,26 @@ RBSTATIC const struct rb_augment_callbacks RBNAME = { \

#define RB_DECLARE_CALLBACKS_MAX(RBSTATIC, RBNAME, RBSTRUCT, RBFIELD, \
RBTYPE, RBAUGMENTED, RBCOMPUTE) \
-static inline bool RBNAME ## _compute_max(RBSTRUCT *node, bool exit) \
-{ \
- RBSTRUCT *child; \
- RBTYPE max = RBCOMPUTE(node); \
- if (node->RBFIELD.rb_left) { \
- child = rb_entry(node->RBFIELD.rb_left, RBSTRUCT, RBFIELD); \
- if (child->RBAUGMENTED > max) \
- max = child->RBAUGMENTED; \
- } \
- if (node->RBFIELD.rb_right) { \
- child = rb_entry(node->RBFIELD.rb_right, RBSTRUCT, RBFIELD); \
- if (child->RBAUGMENTED > max) \
- max = child->RBAUGMENTED; \
- } \
- if (exit && node->RBAUGMENTED == max) \
- return true; \
- node->RBAUGMENTED = max; \
- return false; \
-} \
-RB_DECLARE_CALLBACKS(RBSTATIC, RBNAME, \
+static inline RBTYPE RBNAME ## _get_max(struct rb_node *node) \
+{ \
+ RBSTRUCT *tmp; \
+ \
+ tmp = rb_entry_safe(node, RBSTRUCT, RBFIELD); \
+ return tmp ? tmp->RBAUGMENTED : 0; \
+} \
+ \
+static inline bool RBNAME ## _compute_max(RBSTRUCT *node, bool exit) \
+{ \
+ RBTYPE max = max3(RBCOMPUTE(node), \
+ RBNAME ## _get_max(node->RBFIELD.rb_left), \
+ RBNAME ## _get_max(node->RBFIELD.rb_right)); \
+ \
+ if (exit && node->RBAUGMENTED == max) \
+ return true; \
+ node->RBAUGMENTED = max; \
+ return false; \
+} \
+RB_DECLARE_CALLBACKS(RBSTATIC, RBNAME, \
RBSTRUCT, RBFIELD, RBAUGMENTED, RBNAME ## _compute_max)


diff --git a/tools/include/linux/rbtree_augmented.h b/tools/include/linux/rbtree_augmented.h
index 381aa948610d..3b8284479e98 100644
--- a/tools/include/linux/rbtree_augmented.h
+++ b/tools/include/linux/rbtree_augmented.h
@@ -121,26 +121,26 @@ RBSTATIC const struct rb_augment_callbacks RBNAME = { \

#define RB_DECLARE_CALLBACKS_MAX(RBSTATIC, RBNAME, RBSTRUCT, RBFIELD, \
RBTYPE, RBAUGMENTED, RBCOMPUTE) \
-static inline bool RBNAME ## _compute_max(RBSTRUCT *node, bool exit) \
-{ \
- RBSTRUCT *child; \
- RBTYPE max = RBCOMPUTE(node); \
- if (node->RBFIELD.rb_left) { \
- child = rb_entry(node->RBFIELD.rb_left, RBSTRUCT, RBFIELD); \
- if (child->RBAUGMENTED > max) \
- max = child->RBAUGMENTED; \
- } \
- if (node->RBFIELD.rb_right) { \
- child = rb_entry(node->RBFIELD.rb_right, RBSTRUCT, RBFIELD); \
- if (child->RBAUGMENTED > max) \
- max = child->RBAUGMENTED; \
- } \
- if (exit && node->RBAUGMENTED == max) \
- return true; \
- node->RBAUGMENTED = max; \
- return false; \
-} \
-RB_DECLARE_CALLBACKS(RBSTATIC, RBNAME, \
+static inline RBTYPE RBNAME ## _get_max(struct rb_node *node) \
+{ \
+ RBSTRUCT *tmp; \
+ \
+ tmp = rb_entry_safe(node, RBSTRUCT, RBFIELD); \
+ return tmp ? tmp->RBAUGMENTED : 0; \
+} \
+ \
+static inline bool RBNAME ## _compute_max(RBSTRUCT *node, bool exit) \
+{ \
+ RBTYPE max = max3(RBCOMPUTE(node), \
+ RBNAME ## _get_max(node->RBFIELD.rb_left), \
+ RBNAME ## _get_max(node->RBFIELD.rb_right)); \
+ \
+ if (exit && node->RBAUGMENTED == max) \
+ return true; \
+ node->RBAUGMENTED = max; \
+ return false; \
+} \
+RB_DECLARE_CALLBACKS(RBSTATIC, RBNAME, \
RBSTRUCT, RBFIELD, RBAUGMENTED, RBNAME ## _compute_max)


--
2.11.0

2019-08-12 00:41:46

by Michel Lespinasse

[permalink] [raw]
Subject: Re: [PATCH 2/2] mm/vmalloc: use generated callback to populate subtree_max_size

On Sun, Aug 11, 2019 at 11:46 AM Uladzislau Rezki (Sony)
<[email protected]> wrote:
> RB_DECLARE_CALLBACKS_MAX defines its own callback to update the
> augmented subtree information after a node is modified. It makes
> sense to use it instead of our own propagate implementation.
>
> Apart of that, in case of using generated callback we can eliminate
> compute_subtree_max_size() function and get rid of duplication.
>
> Signed-off-by: Uladzislau Rezki (Sony) <[email protected]>

Reviewed-by: Michel Lespinasse <[email protected]>

Love it. Thanks a lot for the cleanup!

2019-08-12 01:39:35

by Michel Lespinasse

[permalink] [raw]
Subject: Re: [PATCH 1/2] augmented rbtree: use max3() in the *_compute_max() function

On Sun, Aug 11, 2019 at 11:46 AM Uladzislau Rezki (Sony)
<[email protected]> wrote:
>
> Recently there was introduced RB_DECLARE_CALLBACKS_MAX template.
> One of the callback, to be more specific *_compute_max(), calculates
> a maximum scalar value of node against its left/right sub-tree.
>
> To simplify the code and improve readability we can switch and
> make use of max3() macro that makes the code more transparent.
>
> Signed-off-by: Uladzislau Rezki (Sony) <[email protected]>

Thanks. The change is correct but I think I prefer it the "before"
version. My reasons are:

- I don't have a strong style preference either way - it's the same
amount of code either way, admittedly more modular in your proposal,
but also with more indirection (compute_max refers to get_max and
max3). The indirection doesn't hinder readability but IMO it makes it
harder to be confident that the compiler will generate quality code,
compared to the "before" approach which just lays down all the pieces
in a linear way.

- A quick check shows that the proposed change generates larger code
for mm/interval_tree.o:
2757 0 0 2757 ac5 mm/interval_tree.o
2533 0 0 2533 9e5 mm/interval_tree.o.orig
This does not happen for every RB_DECLARE_CALLBACKS_MAX use,
lib/interval_tree.o in particular seems to be fine. But it does go
towards my gut feeling that the change trusts the compiler/optimizer
more than I want to.

- Slight loss of generality. The "before" code only assumes that the
RBAUGMENTED field can be compared using "<" ; the "after" code also
assumes that the minimum value is 0. While this covers the current
uses, I would prefer not to have that limitation.

2019-08-12 07:15:13

by Michel Lespinasse

[permalink] [raw]
Subject: Re: [PATCH 0/2] some cleanups related to RB_DECLARE_CALLBACKS_MAX

On Sun, Aug 11, 2019 at 11:46 AM Uladzislau Rezki (Sony) <[email protected]> wrote:
> Also i have open question related to validating of the augment tree, i mean
> in case of debugging to check that nodes are maintained correctly. Please
> have a look here: https://lkml.org/lkml/2019/7/29/304
>
> Basically we can add one more function under RB_DECLARE_CALLBACKS_MAX template
> making it public that checks a tree and its augmented nodes. At least i see
> two users where it can be used: vmalloc and lib/rbtree_test.c.

I think it would be sufficient to call RBCOMPUTE(node, true) on every
node and check the return value ?

Something like the following (probably applicable in other files too):

---------------------------------- 8< ------------------------------------

augmented rbtree: use generated compute_max function for debug checks

In debug code, use the generated compute_max function instead of
reimplementing similar functionality in multiple places.

Signed-off-by: Michel Lespinasse <[email protected]>
---
lib/rbtree_test.c | 15 +-------------
mm/mmap.c | 26 +++--------------------
mm/vmalloc.c | 53 +++++++----------------------------------------
3 files changed, 12 insertions(+), 82 deletions(-)

diff --git a/lib/rbtree_test.c b/lib/rbtree_test.c
index 41ae3c7570d3..a5a04e820f77 100644
--- a/lib/rbtree_test.c
+++ b/lib/rbtree_test.c
@@ -222,20 +222,7 @@ static void check_augmented(int nr_nodes)
check(nr_nodes);
for (rb = rb_first(&root.rb_root); rb; rb = rb_next(rb)) {
struct test_node *node = rb_entry(rb, struct test_node, rb);
- u32 subtree, max = node->val;
- if (node->rb.rb_left) {
- subtree = rb_entry(node->rb.rb_left, struct test_node,
- rb)->augmented;
- if (max < subtree)
- max = subtree;
- }
- if (node->rb.rb_right) {
- subtree = rb_entry(node->rb.rb_right, struct test_node,
- rb)->augmented;
- if (max < subtree)
- max = subtree;
- }
- WARN_ON_ONCE(node->augmented != max);
+ WARN_ON_ONCE(!augment_callbacks_compute_max(node, true));
}
}

diff --git a/mm/mmap.c b/mm/mmap.c
index 24f0772d6afd..d6d23e6c2d10 100644
--- a/mm/mmap.c
+++ b/mm/mmap.c
@@ -311,24 +311,6 @@ static inline unsigned long vma_compute_gap(struct vm_area_struct *vma)
}

#ifdef CONFIG_DEBUG_VM_RB
-static unsigned long vma_compute_subtree_gap(struct vm_area_struct *vma)
-{
- unsigned long max = vma_compute_gap(vma), subtree_gap;
- if (vma->vm_rb.rb_left) {
- subtree_gap = rb_entry(vma->vm_rb.rb_left,
- struct vm_area_struct, vm_rb)->rb_subtree_gap;
- if (subtree_gap > max)
- max = subtree_gap;
- }
- if (vma->vm_rb.rb_right) {
- subtree_gap = rb_entry(vma->vm_rb.rb_right,
- struct vm_area_struct, vm_rb)->rb_subtree_gap;
- if (subtree_gap > max)
- max = subtree_gap;
- }
- return max;
-}
-
static int browse_rb(struct mm_struct *mm)
{
struct rb_root *root = &mm->mm_rb;
@@ -355,10 +337,8 @@ static int browse_rb(struct mm_struct *mm)
bug = 1;
}
spin_lock(&mm->page_table_lock);
- if (vma->rb_subtree_gap != vma_compute_subtree_gap(vma)) {
- pr_emerg("free gap %lx, correct %lx\n",
- vma->rb_subtree_gap,
- vma_compute_subtree_gap(vma));
+ if (!vma_gap_callbacks_compute_max(vma, true)) {
+ pr_emerg("wrong subtree gap in vma %p\n", vma);
bug = 1;
}
spin_unlock(&mm->page_table_lock);
@@ -385,7 +365,7 @@ static void validate_mm_rb(struct rb_root *root, struct vm_area_struct *ignore)
struct vm_area_struct *vma;
vma = rb_entry(nd, struct vm_area_struct, vm_rb);
VM_BUG_ON_VMA(vma != ignore &&
- vma->rb_subtree_gap != vma_compute_subtree_gap(vma),
+ !vma_gap_callbacks_compute_max(vma, true),
vma);
}
}
diff --git a/mm/vmalloc.c b/mm/vmalloc.c
index f7c61accb0e2..ea23ccaf70fc 100644
--- a/mm/vmalloc.c
+++ b/mm/vmalloc.c
@@ -553,48 +553,6 @@ unlink_va(struct vmap_area *va, struct rb_root *root)
RB_CLEAR_NODE(&va->rb_node);
}

-#if DEBUG_AUGMENT_PROPAGATE_CHECK
-static void
-augment_tree_propagate_check(struct rb_node *n)
-{
- struct vmap_area *va;
- struct rb_node *node;
- unsigned long size;
- bool found = false;
-
- if (n == NULL)
- return;
-
- va = rb_entry(n, struct vmap_area, rb_node);
- size = va->subtree_max_size;
- node = n;
-
- while (node) {
- va = rb_entry(node, struct vmap_area, rb_node);
-
- if (get_subtree_max_size(node->rb_left) == size) {
- node = node->rb_left;
- } else {
- if (va_size(va) == size) {
- found = true;
- break;
- }
-
- node = node->rb_right;
- }
- }
-
- if (!found) {
- va = rb_entry(n, struct vmap_area, rb_node);
- pr_emerg("tree is corrupted: %lu, %lu\n",
- va_size(va), va->subtree_max_size);
- }
-
- augment_tree_propagate_check(n->rb_left);
- augment_tree_propagate_check(n->rb_right);
-}
-#endif
-
/*
* This function populates subtree_max_size from bottom to upper
* levels starting from VA point. The propagation must be done
@@ -645,9 +603,14 @@ augment_tree_propagate_from(struct vmap_area *va)
node = rb_parent(&va->rb_node);
}

-#if DEBUG_AUGMENT_PROPAGATE_CHECK
- augment_tree_propagate_check(free_vmap_area_root.rb_node);
-#endif
+ if (DEBUG_AUGMENT_PROPAGATE_CHECK) {
+ struct vmap_area *va;
+
+ list_for_each_entry(va, &free_vmap_area_list, list) {
+ WARN_ON(!free_vmap_area_rb_augment_cb_compute_max(
+ va, true));
+ }
+ }
}

static void
--
2.23.0.rc1.153.gdeed80330f-goog

2019-08-13 09:02:34

by Uladzislau Rezki

[permalink] [raw]
Subject: Re: [PATCH 1/2] augmented rbtree: use max3() in the *_compute_max() function

> On Sun, Aug 11, 2019 at 11:46 AM Uladzislau Rezki (Sony)
> <[email protected]> wrote:
> >
> > Recently there was introduced RB_DECLARE_CALLBACKS_MAX template.
> > One of the callback, to be more specific *_compute_max(), calculates
> > a maximum scalar value of node against its left/right sub-tree.
> >
> > To simplify the code and improve readability we can switch and
> > make use of max3() macro that makes the code more transparent.
> >
> > Signed-off-by: Uladzislau Rezki (Sony) <[email protected]>
>
> Thanks. The change is correct but I think I prefer it the "before"
> version. My reasons are:
>
> - I don't have a strong style preference either way - it's the same
> amount of code either way, admittedly more modular in your proposal,
> but also with more indirection (compute_max refers to get_max and
> max3). The indirection doesn't hinder readability but IMO it makes it
> harder to be confident that the compiler will generate quality code,
> compared to the "before" approach which just lays down all the pieces
> in a linear way.
Thank you for your comments. As for the compiler and what can be generated
as a result - it depends on the arch, etc., so I agree here. "inline" is a hint only.
But it can be rewritten. One way is to use __always_inline; another one is:

<snip>
RBTYPE max = max3(RBCOMPUTE(node), \
node->RBFIELD.rb_left ? \
rb_entry(node->RBFIELD.rb_left, \
RBSTRUCT, RBFIELD)->RBAUGMENTED:0, \
node->RBFIELD.rb_right ? \
rb_entry(node->RBFIELD.rb_right, \
RBSTRUCT, RBFIELD)->RBAUGMENTED:0);
<snip>

i.e. directly embed the access to the left/right nodes into max3().
That way we can get rid of the extra "child" variable and have linear
code as in the "before" variant.

Again, I am not interested in just pushing this change; the aim was
to make it more readable for others, and that is it.

>
> - A quick check shows that the proposed change generates larger code
> for mm/interval_tree.o:
> 2757 0 0 2757 ac5 mm/interval_tree.o
> 2533 0 0 2533 9e5 mm/interval_tree.o.orig
> This does not happen for every RB_DECLARE_CALLBACKS_MAX use,
> lib/interval_tree.o in particular seems to be fine. But it does go
> towards my gut feeling that the change trusts the compiler/optimizer
> more than I want to.
>
I see your point. Indeed the generated code is a bit bigger with the change;
however, with the above modification it improves the situation and becomes:
<snip>
284544 Aug 13 09:53 interval_tree.o
283192 Aug 13 09:57 interval_tree.o.orig
<snip>

but it is still a bit bigger. If we care about that, then I will drop this patch,
because the "before" code is better in that context.

> - Slight loss of generality. The "before" code only assumes that the
> RBAUGMENTED field can be compared using "<" ; the "after" code also
> assumes that the minimum value is 0. While this covers the current
> uses, I would prefer not to have that limitation.
If we care about negative augmented values, then we should stick to the
"before" code. I agree here. If you have any ideas how to extend it and
cover negative cases, please let me know. Otherwise we can drop this
change and not pay much attention to it.

Thank you.

--
Vlad Rezki

2019-08-13 09:04:19

by Uladzislau Rezki

[permalink] [raw]
Subject: Re: [PATCH 2/2] mm/vmalloc: use generated callback to populate subtree_max_size

On Sun, Aug 11, 2019 at 05:39:23PM -0700, Michel Lespinasse wrote:
> On Sun, Aug 11, 2019 at 11:46 AM Uladzislau Rezki (Sony)
> <[email protected]> wrote:
> > RB_DECLARE_CALLBACKS_MAX defines its own callback to update the
> > augmented subtree information after a node is modified. It makes
> > sense to use it instead of our own propagate implementation.
> >
> > Apart of that, in case of using generated callback we can eliminate
> > compute_subtree_max_size() function and get rid of duplication.
> >
> > Signed-off-by: Uladzislau Rezki (Sony) <[email protected]>
>
> Reviewed-by: Michel Lespinasse <[email protected]>
>
> Love it. Thanks a lot for the cleanup!
Thank you for review!

--
Vlad Rezki

2019-08-13 09:31:03

by Uladzislau Rezki

[permalink] [raw]
Subject: Re: [PATCH 0/2] some cleanups related to RB_DECLARE_CALLBACKS_MAX

>
> I think it would be sufficient to call RBCOMPUTE(node, true) on every
> node and check the return value ?
>
Yes, that is enough for sure. The only thing I was thinking about was to make it
public, because checking the tree for MAX is generic for all users which
use the RB_DECLARE_CALLBACKS_MAX template. Something like:

validate_rb_max_tree() {
    for (nd = rb_first(root); nd; nd = rb_next(nd)) {
        foo = rb_entry(nd, struct something, rb_field);
        WARN_ON(!*_compute_max(foo, true));
    }
}

and call this public function under debug code. But I do not have a strong
opinion here, and it is probably odd. Anyway, I am fine with your change.

There is small comment below:

>
> Something like the following (probably applicable in other files too):
>
> ---------------------------------- 8< ------------------------------------
>
> augmented rbtree: use generated compute_max function for debug checks
>
> In debug code, use the generated compute_max function instead of
> reimplementing similar functionality in multiple places.
>
> Signed-off-by: Michel Lespinasse <[email protected]>
> ---
> lib/rbtree_test.c | 15 +-------------
> mm/mmap.c | 26 +++--------------------
> mm/vmalloc.c | 53 +++++++----------------------------------------
> 3 files changed, 12 insertions(+), 82 deletions(-)
>
> diff --git a/lib/rbtree_test.c b/lib/rbtree_test.c
> index 41ae3c7570d3..a5a04e820f77 100644
> --- a/lib/rbtree_test.c
> +++ b/lib/rbtree_test.c
> @@ -222,20 +222,7 @@ static void check_augmented(int nr_nodes)
> check(nr_nodes);
> for (rb = rb_first(&root.rb_root); rb; rb = rb_next(rb)) {
> struct test_node *node = rb_entry(rb, struct test_node, rb);
> - u32 subtree, max = node->val;
> - if (node->rb.rb_left) {
> - subtree = rb_entry(node->rb.rb_left, struct test_node,
> - rb)->augmented;
> - if (max < subtree)
> - max = subtree;
> - }
> - if (node->rb.rb_right) {
> - subtree = rb_entry(node->rb.rb_right, struct test_node,
> - rb)->augmented;
> - if (max < subtree)
> - max = subtree;
> - }
> - WARN_ON_ONCE(node->augmented != max);
> + WARN_ON_ONCE(!augment_callbacks_compute_max(node, true));
> }
> }
>
> diff --git a/mm/mmap.c b/mm/mmap.c
> index 24f0772d6afd..d6d23e6c2d10 100644
> --- a/mm/mmap.c
> +++ b/mm/mmap.c
> @@ -311,24 +311,6 @@ static inline unsigned long vma_compute_gap(struct vm_area_struct *vma)
> }
>
> #ifdef CONFIG_DEBUG_VM_RB
> -static unsigned long vma_compute_subtree_gap(struct vm_area_struct *vma)
> -{
> - unsigned long max = vma_compute_gap(vma), subtree_gap;
> - if (vma->vm_rb.rb_left) {
> - subtree_gap = rb_entry(vma->vm_rb.rb_left,
> - struct vm_area_struct, vm_rb)->rb_subtree_gap;
> - if (subtree_gap > max)
> - max = subtree_gap;
> - }
> - if (vma->vm_rb.rb_right) {
> - subtree_gap = rb_entry(vma->vm_rb.rb_right,
> - struct vm_area_struct, vm_rb)->rb_subtree_gap;
> - if (subtree_gap > max)
> - max = subtree_gap;
> - }
> - return max;
> -}
> -
> static int browse_rb(struct mm_struct *mm)
> {
> struct rb_root *root = &mm->mm_rb;
> @@ -355,10 +337,8 @@ static int browse_rb(struct mm_struct *mm)
> bug = 1;
> }
> spin_lock(&mm->page_table_lock);
> - if (vma->rb_subtree_gap != vma_compute_subtree_gap(vma)) {
> - pr_emerg("free gap %lx, correct %lx\n",
> - vma->rb_subtree_gap,
> - vma_compute_subtree_gap(vma));
> + if (!vma_gap_callbacks_compute_max(vma, true)) {
> + pr_emerg("wrong subtree gap in vma %p\n", vma);
> bug = 1;
> }
> spin_unlock(&mm->page_table_lock);
> @@ -385,7 +365,7 @@ static void validate_mm_rb(struct rb_root *root, struct vm_area_struct *ignore)
> struct vm_area_struct *vma;
> vma = rb_entry(nd, struct vm_area_struct, vm_rb);
> VM_BUG_ON_VMA(vma != ignore &&
> - vma->rb_subtree_gap != vma_compute_subtree_gap(vma),
> + !vma_gap_callbacks_compute_max(vma, true),
> vma);
> }
> }
> diff --git a/mm/vmalloc.c b/mm/vmalloc.c
> index f7c61accb0e2..ea23ccaf70fc 100644
> --- a/mm/vmalloc.c
> +++ b/mm/vmalloc.c
> @@ -553,48 +553,6 @@ unlink_va(struct vmap_area *va, struct rb_root *root)
> RB_CLEAR_NODE(&va->rb_node);
> }
>
> -#if DEBUG_AUGMENT_PROPAGATE_CHECK
> -static void
> -augment_tree_propagate_check(struct rb_node *n)
> -{
> - struct vmap_area *va;
> - struct rb_node *node;
> - unsigned long size;
> - bool found = false;
> -
> - if (n == NULL)
> - return;
> -
> - va = rb_entry(n, struct vmap_area, rb_node);
> - size = va->subtree_max_size;
> - node = n;
> -
> - while (node) {
> - va = rb_entry(node, struct vmap_area, rb_node);
> -
> - if (get_subtree_max_size(node->rb_left) == size) {
> - node = node->rb_left;
> - } else {
> - if (va_size(va) == size) {
> - found = true;
> - break;
> - }
> -
> - node = node->rb_right;
> - }
> - }
> -
> - if (!found) {
> - va = rb_entry(n, struct vmap_area, rb_node);
> - pr_emerg("tree is corrupted: %lu, %lu\n",
> - va_size(va), va->subtree_max_size);
> - }
> -
> - augment_tree_propagate_check(n->rb_left);
> - augment_tree_propagate_check(n->rb_right);
> -}
> -#endif
> -
> /*
> * This function populates subtree_max_size from bottom to upper
> * levels starting from VA point. The propagation must be done
> @@ -645,9 +603,14 @@ augment_tree_propagate_from(struct vmap_area *va)
> node = rb_parent(&va->rb_node);
> }
>
> -#if DEBUG_AUGMENT_PROPAGATE_CHECK
> - augment_tree_propagate_check(free_vmap_area_root.rb_node);
> -#endif
> + if (DEBUG_AUGMENT_PROPAGATE_CHECK) {
> + struct vmap_area *va;
> +
> + list_for_each_entry(va, &free_vmap_area_list, list) {
> + WARN_ON(!free_vmap_area_rb_augment_cb_compute_max(
> + va, true));
> + }
> + }
> }
>
The object of validation is the tree, therefore it makes sense to walk it
instead of iterating over the list.

Thank you!

--
Vlad Rezki