 #include <linux/rcupdate.h>
 #include <linux/slab.h>
 #include <linux/string.h>
+#include <linux/xarray.h>
 
 
 /* Number of nodes in fully populated tree of given height */
@@ -98,24 +99,7 @@ static inline void *node_to_entry(void *ptr)
 	return (void *)((unsigned long)ptr | RADIX_TREE_INTERNAL_NODE);
 }
 
-#define RADIX_TREE_RETRY	node_to_entry(NULL)
-
-#ifdef CONFIG_RADIX_TREE_MULTIORDER
-/* Sibling slots point directly to another slot in the same node */
-static inline
-bool is_sibling_entry(const struct radix_tree_node *parent, void *node)
-{
-	void __rcu **ptr = node;
-	return (parent->slots <= ptr) &&
-		(ptr < parent->slots + RADIX_TREE_MAP_SIZE);
-}
-#else
-static inline
-bool is_sibling_entry(const struct radix_tree_node *parent, void *node)
-{
-	return false;
-}
-#endif
+#define RADIX_TREE_RETRY	XA_RETRY_ENTRY
 
 static inline unsigned long
 get_slot_offset(const struct radix_tree_node *parent, void __rcu **slot)
@@ -129,16 +113,10 @@ static unsigned int radix_tree_descend(const struct radix_tree_node *parent,
 	unsigned int offset = (index >> parent->shift) & RADIX_TREE_MAP_MASK;
 	void __rcu **entry = rcu_dereference_raw(parent->slots[offset]);
 
-#ifdef CONFIG_RADIX_TREE_MULTIORDER
-	if (radix_tree_is_internal_node(entry)) {
-		if (is_sibling_entry(parent, entry)) {
-			void __rcu **sibentry;
-			sibentry = (void __rcu **) entry_to_node(entry);
-			offset = get_slot_offset(parent, sibentry);
-			entry = rcu_dereference_raw(*sibentry);
-		}
+	if (xa_is_sibling(entry)) {
+		offset = xa_to_sibling(entry);
+		entry = rcu_dereference_raw(parent->slots[offset]);
 	}
-#endif
 
 	*nodep = (void *)entry;
 	return offset;
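
After this hunk, a sibling slot no longer stores a pointer back into the parent node's slot array; it stores a small internal entry that encodes the offset of the canonical slot. The sketch below is a minimal, userspace-only approximation of the xa_mk_sibling()/xa_is_sibling()/xa_to_sibling() behaviour this hunk relies on, assuming the usual tagged-pointer encoding for internal entries; the names mk_sibling(), to_sibling(), is_sibling() and the constant CHUNK_SIZE are illustrative stand-ins, not the kernel's definitions in <linux/xarray.h>.

    /* Illustrative sketch only -- not the kernel's <linux/xarray.h>. */
    #include <assert.h>
    #include <stdbool.h>

    #define CHUNK_SIZE 64   /* stand-in for RADIX_TREE_MAP_SIZE / XA_CHUNK_SIZE */

    /* Internal entries are tagged pointers: value shifted up, low bits == 2. */
    static void *mk_internal(unsigned long v) { return (void *)((v << 2) | 2); }
    static unsigned long to_internal(void *e) { return (unsigned long)e >> 2; }
    static bool is_internal(void *e) { return ((unsigned long)e & 3) == 2; }

    /* A sibling entry encodes the offset of the canonical slot in the same node. */
    static void *mk_sibling(unsigned int offset) { return mk_internal(offset); }
    static unsigned int to_sibling(void *e) { return (unsigned int)to_internal(e); }
    static bool is_sibling(void *e) { return is_internal(e) && to_internal(e) < CHUNK_SIZE; }

    int main(void)
    {
            void *slots[CHUNK_SIZE] = { 0 };
            int value = 42;
            void *item = &value;            /* any non-internal pointer */

            slots[8] = item;                /* canonical slot of a multi-order entry */
            slots[9] = mk_sibling(8);       /* sibling slot redirects by offset */

            /* What radix_tree_descend() now does when it lands on slot 9. */
            unsigned int offset = 9;
            void *entry = slots[offset];
            if (is_sibling(entry)) {
                    offset = to_sibling(entry);
                    entry = slots[offset];
            }
            assert(offset == 8 && entry == item);
            return 0;
    }
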
@@ -300,10 +278,10 @@ static void dump_node(struct radix_tree_node *node, unsigned long index)
 		} else if (!radix_tree_is_internal_node(entry)) {
 			pr_debug("radix entry %p offset %ld indices %lu-%lu parent %p\n",
 					entry, i, first, last, node);
-		} else if (is_sibling_entry(node, entry)) {
+		} else if (xa_is_sibling(entry)) {
 			pr_debug("radix sblng %p offset %ld indices %lu-%lu parent %p val %p\n",
 					entry, i, first, last, node,
-					*(void **)entry_to_node(entry));
+					node->slots[xa_to_sibling(entry)]);
 		} else {
 			dump_node(entry_to_node(entry), first);
 		}
@@ -881,8 +859,7 @@ static void radix_tree_free_nodes(struct radix_tree_node *node)
 
 	for (;;) {
 		void *entry = rcu_dereference_raw(child->slots[offset]);
-		if (radix_tree_is_internal_node(entry) && child->shift &&
-				!is_sibling_entry(child, entry)) {
+		if (xa_is_node(entry) && child->shift) {
 			child = entry_to_node(entry);
 			offset = 0;
 			continue;
@@ -904,7 +881,7 @@ static void radix_tree_free_nodes(struct radix_tree_node *node)
 static inline int insert_entries(struct radix_tree_node *node,
 		void __rcu **slot, void *item, unsigned order, bool replace)
 {
-	struct radix_tree_node *child;
+	void *sibling;
 	unsigned i, n, tag, offset, tags = 0;
 
 	if (node) {
@@ -922,7 +899,7 @@ static inline int insert_entries(struct radix_tree_node *node,
 		offset = offset & ~(n - 1);
 		slot = &node->slots[offset];
 	}
-	child = node_to_entry(slot);
+	sibling = xa_mk_sibling(offset);
 
 	for (i = 0; i < n; i++) {
 		if (slot[i]) {
@@ -939,7 +916,7 @@ static inline int insert_entries(struct radix_tree_node *node,
 	for (i = 0; i < n; i++) {
 		struct radix_tree_node *old = rcu_dereference_raw(slot[i]);
 		if (i) {
-			rcu_assign_pointer(slot[i], child);
+			rcu_assign_pointer(slot[i], sibling);
 			for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
 				if (tags & (1 << tag))
 					tag_clear(node, tag, offset + i);
@@ -949,9 +926,7 @@ static inline int insert_entries(struct radix_tree_node *node,
 				if (tags & (1 << tag))
 					tag_set(node, tag, offset);
 		}
-		if (radix_tree_is_internal_node(old) &&
-				!is_sibling_entry(node, old) &&
-				(old != RADIX_TREE_RETRY))
+		if (xa_is_node(old))
 			radix_tree_free_nodes(old);
 		if (xa_is_value(old))
 			node->exceptional--;
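
For orientation, here is a rough sketch of what the patched insert_entries() now leaves behind for one multi-order entry: the item in the canonical slot and offset-encoded sibling entries in the remaining covered slots, rather than node_to_entry(slot) pointers. It reuses the illustrative mk_sibling() helper from the earlier sketch, and lay_out_multiorder() is a hypothetical name, not a kernel function.

    /*
     * Sketch of the slot layout produced for one entry covering n slots:
     * canonical item first, offset-encoded sibling entries after it.
     * mk_sibling() is the illustrative helper from the previous example.
     */
    static void lay_out_multiorder(void **slots, unsigned int offset,
                                   unsigned int n, void *item)
    {
            void *sibling = mk_sibling(offset);     /* encodes the head offset */
            unsigned int i;

            for (i = 0; i < n; i++)
                    slots[offset + i] = i ? sibling : item;
    }

Because each sibling slot now carries the head offset rather than a pointer into the node, callers such as radix_tree_free_nodes() and skip_siblings() can test for siblings with xa_is_sibling() alone, without knowing which node the slot lives in.
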
@@ -1112,18 +1087,17 @@ static inline void replace_sibling_entries(struct radix_tree_node *node,
 		void __rcu **slot, int count, int exceptional)
 {
 #ifdef CONFIG_RADIX_TREE_MULTIORDER
-	void *ptr = node_to_entry(slot);
-	unsigned offset = get_slot_offset(node, slot) + 1;
+	unsigned offset = get_slot_offset(node, slot);
+	void *ptr = xa_mk_sibling(offset);
 
-	while (offset < RADIX_TREE_MAP_SIZE) {
+	while (++offset < RADIX_TREE_MAP_SIZE) {
 		if (rcu_dereference_raw(node->slots[offset]) != ptr)
 			break;
 		if (count < 0) {
 			node->slots[offset] = NULL;
 			node->count--;
 		}
 		node->exceptional += exceptional;
-		offset++;
 	}
 #endif
 }
@@ -1319,8 +1293,7 @@ int radix_tree_split(struct radix_tree_root *root, unsigned long index,
 			tags |= 1 << tag;
 
 	for (end = offset + 1; end < RADIX_TREE_MAP_SIZE; end++) {
-		if (!is_sibling_entry(parent,
-				rcu_dereference_raw(parent->slots[end])))
+		if (!xa_is_sibling(rcu_dereference_raw(parent->slots[end])))
 			break;
 		for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
 			if (tags & (1 << tag))
@@ -1618,7 +1591,7 @@ static void __rcu **skip_siblings(struct radix_tree_node **nodep,
 {
 	while (iter->index < iter->next_index) {
 		*nodep = rcu_dereference_raw(*slot);
-		if (*nodep && !is_sibling_entry(iter->node, *nodep))
+		if (*nodep && !xa_is_sibling(*nodep))
 			return slot;
 		slot++;
 		iter->index = __radix_tree_iter_add(iter, 1);
@@ -1769,7 +1742,7 @@ void __rcu **radix_tree_next_chunk(const struct radix_tree_root *root,
 			while (++offset < RADIX_TREE_MAP_SIZE) {
 				void *slot = rcu_dereference_raw(
 						node->slots[offset]);
-				if (is_sibling_entry(node, slot))
+				if (xa_is_sibling(slot))
 					continue;
 				if (slot)
 					break;
@@ -2283,6 +2256,7 @@ void __init radix_tree_init(void)
 
 	BUILD_BUG_ON(RADIX_TREE_MAX_TAGS + __GFP_BITS_SHIFT > 32);
 	BUILD_BUG_ON(ROOT_IS_IDR & ~GFP_ZONEMASK);
+	BUILD_BUG_ON(XA_CHUNK_SIZE > 255);
 	radix_tree_node_cachep = kmem_cache_create("radix_tree_node",
 			sizeof(struct radix_tree_node), 0,
 			SLAB_PANIC | SLAB_RECLAIM_ACCOUNT,
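
The new BUILD_BUG_ON() ties into the same encoding: sibling offsets now live in the internal-entry value space, which they share with other reserved entries such as RADIX_TREE_RETRY (now XA_RETRY_ENTRY). Assuming the retry entry is the reserved internal value 256, capping the chunk size at 255 keeps every possible sibling offset below it. The static asserts below restate that argument with the illustrative encoding from the first sketch; the 256 figure is an assumption, not something stated in this diff.

    /* Sketch of the encoding-space argument behind BUILD_BUG_ON(XA_CHUNK_SIZE > 255). */
    #define SKETCH_CHUNK_SIZE   64          /* stand-in, must stay <= 255 */
    #define SKETCH_RETRY_VALUE  256         /* assumed reserved internal value */

    /* Largest sibling offset (CHUNK_SIZE - 1) must not reach the retry value. */
    _Static_assert(SKETCH_CHUNK_SIZE - 1 < SKETCH_RETRY_VALUE,
                   "sibling offsets would collide with the retry entry");
    _Static_assert(SKETCH_CHUNK_SIZE <= 255,
                   "mirrors the cap checked in radix_tree_init()");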