From b02fcc082a4ae1675a7f70a017ca4926286271ad Mon Sep 17 00:00:00 2001
From: "Liam R. Howlett"
Date: Mon, 16 Dec 2024 14:01:13 -0500
Subject: [PATCH] test_maple_tree: test exhausted upper limit of mtree_alloc_cyclic()

When the upper bound of the search is exhausted, the maple state may be
returned in an error state of -EBUSY.  This means the maple state needs
to be reset before the second search in mas_alloc_cyclic() to ensure the
search happens.  This test ensures the issue is not recreated.

Link: https://lkml.kernel.org/r/20241216190113.1226145-3-Liam.Howlett@oracle.com
Signed-off-by: Liam R. Howlett
Reviewed-by: Yang Erkun
Cc: Christian Brauner
Cc: Chuck Lever
Signed-off-by: Andrew Morton
---
 lib/test_maple_tree.c | 28 ++++++++++++++++++++++++++++
 1 file changed, 28 insertions(+)

diff --git a/lib/test_maple_tree.c b/lib/test_maple_tree.c
index 72bda304b595..13e2a10d7554 100644
--- a/lib/test_maple_tree.c
+++ b/lib/test_maple_tree.c
@@ -3738,6 +3738,34 @@ static noinline void __init alloc_cyclic_testing(struct maple_tree *mt)
 	}
 
 	mtree_destroy(mt);
+
+	/*
+	 * Issue with reverse search was discovered
+	 * https://lore.kernel.org/all/20241216060600.287B4C4CED0@smtp.kernel.org/
+	 * Exhausting the allocation area and forcing the search to wrap needs a
+	 * mas_reset() in mas_alloc_cyclic().
+	 */
+	next = 0;
+	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
+	for (int i = 0; i < 1023; i++) {
+		mtree_alloc_cyclic(mt, &location, mt, 2, 1024, &next, GFP_KERNEL);
+		MT_BUG_ON(mt, i != location - 2);
+		MT_BUG_ON(mt, i != next - 3);
+		MT_BUG_ON(mt, mtree_load(mt, location) != mt);
+	}
+	mtree_erase(mt, 123);
+	MT_BUG_ON(mt, mtree_load(mt, 123) != NULL);
+	mtree_alloc_cyclic(mt, &location, mt, 2, 1024, &next, GFP_KERNEL);
+	MT_BUG_ON(mt, 123 != location);
+	MT_BUG_ON(mt, 124 != next);
+	MT_BUG_ON(mt, mtree_load(mt, location) != mt);
+	mtree_erase(mt, 100);
+	mtree_alloc_cyclic(mt, &location, mt, 2, 1024, &next, GFP_KERNEL);
+	MT_BUG_ON(mt, 100 != location);
+	MT_BUG_ON(mt, 101 != next);
+	MT_BUG_ON(mt, mtree_load(mt, location) != mt);
+	mtree_destroy(mt);
+
 	/* Overflow test */
 	next = ULONG_MAX - 1;
 	ret = mtree_alloc_cyclic(mt, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
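
Note (not part of the patch above): the fix this test exercises is only described in the
commit message, so the following is a minimal sketch of the retry pattern it implies.
alloc_cyclic_sketch() is a hypothetical illustration, not the upstream mas_alloc_cyclic()
body; the exact bounds and hint handling in the real function may differ.

/*
 * Hypothetical sketch, assuming the behaviour described in the commit
 * message: the first search covers the remaining range and can return
 * -EBUSY once that range is exhausted, and the maple state must then be
 * reset before the wrapped second search or that search never runs.
 */
static int alloc_cyclic_sketch(struct ma_state *mas, unsigned long min,
			       unsigned long max, unsigned long next,
			       unsigned long size)
{
	int ret;

	/* First try from the cyclic hint up to the upper limit. */
	ret = mas_empty_area(mas, next, max, size);
	if (ret == -EBUSY) {
		/*
		 * The failed search leaves the maple state in an error
		 * state; reset it so the wrap-around search starting from
		 * min actually walks the tree again.
		 */
		mas_reset(mas);
		ret = mas_empty_area(mas, min, max, size);
	}
	return ret;
}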