Commit 3a20cb1

joergroedel authored and rafaeljw committed
PM / Hibernate: Implement position keeping in radix tree
Add code to remember the last position that was requested in the radix tree. Use it as a cache for faster linear walking of the bitmap in the memory_bm_rtree_next_pfn() function, which is also added with this patch.

Signed-off-by: Joerg Roedel <jroedel@suse.de>
Signed-off-by: Rafael J. Wysocki <rafael.j.wysocki@intel.com>
1 parent 07a3382 commit 3a20cb1
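
To make the caching idea concrete before the diff: below is a minimal userspace sketch of the same pattern, a cursor that remembers the last visited block and bit so a sequential walk never restarts from the front of the bitmap. It is illustrative only; all names (toy_bitmap, toy_next_pfn, TOY_END_OF_MAP, ...) are made up for this sketch, and the real code in the patch keeps its cursor on rtree_node leaves rather than a flat array of blocks.

/*
 * Minimal userspace sketch (not the kernel code) of the position-keeping
 * idea in this patch: remember the last visited block and bit so that a
 * linear walk over the bitmap continues where the previous call stopped.
 */
#include <stdio.h>
#include <stdlib.h>
#include <limits.h>

#define TOY_BITS_PER_BLOCK	(sizeof(unsigned long) * 8UL)
#define TOY_END_OF_MAP		ULONG_MAX

struct toy_block {
	unsigned long data;		/* one word-sized bitmap chunk */
};

struct toy_bitmap {
	struct toy_block *blocks;
	unsigned long nr_blocks;
	/* cached position, analogous to bm->cur in the patch */
	unsigned long cur_block;
	unsigned long cur_bit;
};

static void toy_position_reset(struct toy_bitmap *bm)
{
	bm->cur_block = 0;
	bm->cur_bit = 0;
}

/* Return the next set bit number, or TOY_END_OF_MAP when none is left. */
static unsigned long toy_next_pfn(struct toy_bitmap *bm)
{
	while (bm->cur_block < bm->nr_blocks) {
		unsigned long data = bm->blocks[bm->cur_block].data;
		unsigned long bit;

		for (bit = bm->cur_bit; bit < TOY_BITS_PER_BLOCK; bit++) {
			if (data & (1UL << bit)) {
				bm->cur_bit = bit + 1;	/* remember position */
				return bm->cur_block * TOY_BITS_PER_BLOCK + bit;
			}
		}
		/* block exhausted: advance the cursor, like rtree_next_node() */
		bm->cur_block++;
		bm->cur_bit = 0;
	}
	return TOY_END_OF_MAP;
}

int main(void)
{
	struct toy_bitmap bm;
	unsigned long pfn;

	bm.nr_blocks = 4;
	bm.blocks = calloc(bm.nr_blocks, sizeof(*bm.blocks));
	bm.blocks[0].data = 1UL << 3;	/* first set bit */
	bm.blocks[2].data = 1UL << 5;	/* second set bit, in a later block */

	toy_position_reset(&bm);
	while ((pfn = toy_next_pfn(&bm)) != TOY_END_OF_MAP)
		printf("set pfn: %lu\n", pfn);

	free(bm.blocks);
	return 0;
}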


kernel/power/snapshot.c

Lines changed: 98 additions & 0 deletions
@@ -309,6 +309,11 @@ struct mem_zone_bm_rtree {
 struct bm_position {
         struct bm_block *block;
         int bit;
+
+        struct mem_zone_bm_rtree *zone;
+        struct rtree_node *node;
+        unsigned long node_pfn;
+        int node_bit;
 };
 
 struct memory_bitmap {
@@ -487,6 +492,13 @@ static void memory_bm_position_reset(struct memory_bitmap *bm)
 {
         bm->cur.block = list_entry(bm->blocks.next, struct bm_block, hook);
         bm->cur.bit = 0;
+
+        bm->cur.zone = list_entry(bm->zones.next, struct mem_zone_bm_rtree,
+                                  list);
+        bm->cur.node = list_entry(bm->cur.zone->leaves.next,
+                                  struct rtree_node, list);
+        bm->cur.node_pfn = 0;
+        bm->cur.node_bit = 0;
 }
 
 static void memory_bm_free(struct memory_bitmap *bm, int clear_nosave_free);
@@ -734,6 +746,11 @@ static int memory_rtree_find_bit(struct memory_bitmap *bm, unsigned long pfn,
         struct rtree_node *node;
         int i, block_nr;
 
+        zone = bm->cur.zone;
+
+        if (pfn >= zone->start_pfn && pfn < zone->end_pfn)
+                goto zone_found;
+
         zone = NULL;
 
         /* Find the right zone */
@@ -747,10 +764,16 @@ static int memory_rtree_find_bit(struct memory_bitmap *bm, unsigned long pfn,
         if (!zone)
                 return -EFAULT;
 
+zone_found:
         /*
          * We have a zone. Now walk the radix tree to find the leave
          * node for our pfn.
          */
+
+        node = bm->cur.node;
+        if (((pfn - zone->start_pfn) & ~BM_BLOCK_MASK) == bm->cur.node_pfn)
+                goto node_found;
+
         node = zone->rtree;
         block_nr = (pfn - zone->start_pfn) >> BM_BLOCK_SHIFT;
 
@@ -763,6 +786,12 @@ static int memory_rtree_find_bit(struct memory_bitmap *bm, unsigned long pfn,
                 node = (struct rtree_node *)node->data[index];
         }
 
+node_found:
+        /* Update last position */
+        bm->cur.zone = zone;
+        bm->cur.node = node;
+        bm->cur.node_pfn = (pfn - zone->start_pfn) & ~BM_BLOCK_MASK;
+
         /* Set return values */
         *addr = node->data;
         *bit_nr = (pfn - zone->start_pfn) & BM_BLOCK_MASK;
@@ -860,11 +889,16 @@ static bool memory_bm_pfn_present(struct memory_bitmap *bm, unsigned long pfn)
  *      this function.
  */
 
+static unsigned long memory_bm_rtree_next_pfn(struct memory_bitmap *bm);
+
 static unsigned long memory_bm_next_pfn(struct memory_bitmap *bm)
 {
+        unsigned long rtree_pfn;
         struct bm_block *bb;
         int bit;
 
+        rtree_pfn = memory_bm_rtree_next_pfn(bm);
+
         bb = bm->cur.block;
         do {
                 bit = bm->cur.bit;
@@ -878,13 +912,77 @@ static unsigned long memory_bm_next_pfn(struct memory_bitmap *bm)
         } while (&bb->hook != &bm->blocks);
 
         memory_bm_position_reset(bm);
+        WARN_ON_ONCE(rtree_pfn != BM_END_OF_MAP);
         return BM_END_OF_MAP;
 
 Return_pfn:
+        WARN_ON_ONCE(bb->start_pfn + bit != rtree_pfn);
         bm->cur.bit = bit + 1;
         return bb->start_pfn + bit;
 }
 
+/*
+ *      rtree_next_node - Jumps to the next leave node
+ *
+ *      Sets the position to the beginning of the next node in the
+ *      memory bitmap. This is either the next node in the current
+ *      zone's radix tree or the first node in the radix tree of the
+ *      next zone.
+ *
+ *      Returns true if there is a next node, false otherwise.
+ */
+static bool rtree_next_node(struct memory_bitmap *bm)
+{
+        bm->cur.node = list_entry(bm->cur.node->list.next,
+                                  struct rtree_node, list);
+        if (&bm->cur.node->list != &bm->cur.zone->leaves) {
+                bm->cur.node_pfn += BM_BITS_PER_BLOCK;
+                bm->cur.node_bit = 0;
+                return true;
+        }
+
+        /* No more nodes, goto next zone */
+        bm->cur.zone = list_entry(bm->cur.zone->list.next,
+                                  struct mem_zone_bm_rtree, list);
+        if (&bm->cur.zone->list != &bm->zones) {
+                bm->cur.node = list_entry(bm->cur.zone->leaves.next,
+                                          struct rtree_node, list);
+                bm->cur.node_pfn = 0;
+                bm->cur.node_bit = 0;
+                return true;
+        }
+
+        /* No more zones */
+        return false;
+}
+
+/*
+ *      memory_bm_rtree_next_pfn - Find the next set bit
+ *
+ *      Starting from the last returned position this function searches
+ *      for the next set bit in the memory bitmap and returns its
+ *      number. If no more bit is set BM_END_OF_MAP is returned.
+ */
+static unsigned long memory_bm_rtree_next_pfn(struct memory_bitmap *bm)
+{
+        unsigned long bits, pfn, pages;
+        int bit;
+
+        do {
+                pages = bm->cur.zone->end_pfn - bm->cur.zone->start_pfn;
+                bits = min(pages - bm->cur.node_pfn, BM_BITS_PER_BLOCK);
+                bit = find_next_bit(bm->cur.node->data, bits,
+                                    bm->cur.node_bit);
+                if (bit < bits) {
+                        pfn = bm->cur.zone->start_pfn + bm->cur.node_pfn + bit;
+                        bm->cur.node_bit = bit + 1;
+                        return pfn;
+                }
+        } while (rtree_next_node(bm));
+
+        return BM_END_OF_MAP;
+}
+
 /**
  *      This structure represents a range of page frames the contents of which
  *      should not be saved during the suspend.
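
For orientation, the new iterator is driven through the existing entry points visible in the context above (memory_bm_position_reset(), memory_bm_next_pfn(), BM_END_OF_MAP); with this patch, every memory_bm_next_pfn() result is additionally cross-checked against memory_bm_rtree_next_pfn() via WARN_ON_ONCE(). A sketch of that calling pattern follows, where count_set_pfns() is a hypothetical caller and not part of this patch:

/*
 * Hypothetical caller, not part of this patch: drives the iterator the
 * same way existing users of memory_bm_next_pfn() in snapshot.c do.
 */
static unsigned long count_set_pfns(struct memory_bitmap *bm)
{
        unsigned long pfn, count = 0;

        memory_bm_position_reset(bm);   /* now also resets the rtree cursor */
        for (pfn = memory_bm_next_pfn(bm);
             pfn != BM_END_OF_MAP;
             pfn = memory_bm_next_pfn(bm))
                count++;

        return count;
}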
