Lines Matching defs:mas
193 static void mas_set_height(struct ma_state *mas)
195 unsigned int new_flags = mas->tree->ma_flags;
198 MAS_BUG_ON(mas, mas->depth > MAPLE_HEIGHT_MAX);
199 new_flags |= mas->depth << MT_FLAGS_HEIGHT_OFFSET;
200 mas->tree->ma_flags = new_flags;
203 static unsigned int mas_mt_height(struct ma_state *mas)
205 return mt_height(mas->tree);
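The two helpers above pack the tree's depth into the upper bits of mt->ma_flags. A minimal sketch of the decode side, assuming the MT_FLAGS_HEIGHT_OFFSET and MT_FLAGS_HEIGHT_MASK definitions from include/linux/maple_tree.h; note that mas_set_height() also clears the old height bits on a line that did not match this listing:

	#include <linux/maple_tree.h>

	/* Sketch: recover what mas_set_height() stored; the mask and
	 * offset names are assumptions taken from the public header. */
	static inline unsigned int sketch_height(const struct maple_tree *mt)
	{
		return (mt->ma_flags & MT_FLAGS_HEIGHT_MASK) >>
		       MT_FLAGS_HEIGHT_OFFSET;
	}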
239 static inline void mas_set_err(struct ma_state *mas, long err)
241 mas->node = MA_ERROR(err);
244 static inline bool mas_is_ptr(const struct ma_state *mas)
246 return mas->node == MAS_ROOT;
249 static inline bool mas_is_start(const struct ma_state *mas)
251 return mas->node == MAS_START;
254 bool mas_is_err(struct ma_state *mas)
256 return xa_is_err(mas->node);
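mas_set_err() and mas_is_err() reuse the XArray error convention: the errno rides inside mas->node as a tagged value. A hedged sketch of the round trip, assuming a maple state mas and an int err in scope (MA_ERROR() and xa_err() come from the public headers; mas_set_err() is internal to this file):

	/* Latch an error in the state, then recover it as an errno. */
	mas_set_err(&mas, -ENOMEM);	/* mas.node = MA_ERROR(-ENOMEM) */
	if (mas_is_err(&mas))
		err = xa_err(mas.node);	/* err == -ENOMEM */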
259 static __always_inline bool mas_is_overflow(struct ma_state *mas)
261 if (unlikely(mas->node == MAS_OVERFLOW))
267 static __always_inline bool mas_is_underflow(struct ma_state *mas)
269 if (unlikely(mas->node == MAS_UNDERFLOW))
275 static inline bool mas_searchable(struct ma_state *mas)
277 if (mas_is_none(mas))
280 if (mas_is_ptr(mas))
305 * @mas: The maple state
309 static inline struct maple_node *mas_mn(const struct ma_state *mas)
311 return mte_to_node(mas->node);
373 static inline bool mas_is_root_limits(const struct ma_state *mas)
375 return !mas->min && mas->max == ULONG_MAX;
447 * @mas: The maple state
452 enum maple_type mas_parent_type(struct ma_state *mas, struct maple_enode *enode)
464 if (mt_is_alloc(mas->tree))
482 void mas_set_parent(struct ma_state *mas, struct maple_enode *enode,
490 MAS_BUG_ON(mas, p_type == maple_dense);
491 MAS_BUG_ON(mas, p_type == maple_leaf_64);
578 * @mas: The maple state
587 static inline unsigned long mas_allocated(const struct ma_state *mas)
589 if (!mas->alloc || ((unsigned long)mas->alloc & 0x1))
592 return mas->alloc->total;
597 * @mas: the maple state
601 * located in @mas->alloc->request_count, or directly in @mas->alloc if there is
603 * encoding to store in @mas->alloc directly.
605 static inline void mas_set_alloc_req(struct ma_state *mas, unsigned long count)
607 if (!mas->alloc || ((unsigned long)mas->alloc & 0x1)) {
609 mas->alloc = NULL;
611 mas->alloc = (struct maple_alloc *)(((count) << 1U) | 1U);
615 mas->alloc->request_count = count;
620 * @mas: The maple state
622 * The alloc count is either stored directly in @mas, or in
623 * @mas->alloc->request_count if there is at least one node allocated. Decode
624 * the request count if it's stored directly in @mas->alloc.
628 static inline unsigned int mas_alloc_req(const struct ma_state *mas)
630 if ((unsigned long)mas->alloc & 0x1)
631 return (unsigned long)(mas->alloc) >> 1;
632 else if (mas->alloc)
633 return mas->alloc->request_count;
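As the fragments above show, mas->alloc does double duty: with bit 0 set it carries a shifted request count, otherwise it points at the first preallocated node. A sketch of the encoding, reconstructed from the visible lines:

	/* Tagged-pointer encoding of the allocation request (reconstruction): */
	bool is_count = (unsigned long)mas->alloc & 0x1;	/* tag bit */
	/* encode: mas->alloc = (struct maple_alloc *)((count << 1U) | 1U); */
	/* decode: count = (unsigned long)mas->alloc >> 1;                  */
	/* otherwise the count lives in mas->alloc->request_count.          */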
684 * @mas: The maple state.
689 static inline unsigned long mas_pivot(struct ma_state *mas, unsigned char piv)
691 struct maple_node *node = mas_mn(mas);
692 enum maple_type type = mte_node_type(mas->node);
694 if (MAS_WARN_ON(mas, piv >= mt_pivots[type])) {
695 mas_set_err(mas, -EIO);
712 * mas_safe_pivot() - get the pivot at @piv or mas->max.
713 * @mas: The maple state
718 * Return: The pivot at @piv within the limit of the @pivots array, @mas->max
722 mas_safe_pivot(const struct ma_state *mas, unsigned long *pivots,
726 return mas->max;
733 * @mas: The maple state
740 mas_safe_min(struct ma_state *mas, unsigned long *pivots, unsigned char offset)
745 return mas->min;
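Only the lines mentioning mas survive in this listing, so here is a plausible reconstruction of mas_safe_pivot() for context: the last slot of a node stores no pivot, so an out-of-range @piv falls back to the node's maximum (mt_pivots[] is the per-type pivot count, an internal table assumed here):

	static inline unsigned long
	sketch_safe_pivot(const struct ma_state *mas, unsigned long *pivots,
			  unsigned char piv, enum maple_type type)
	{
		if (piv >= mt_pivots[type])	/* no stored pivot here */
			return mas->max;	/* implied upper bound */
		return pivots[piv];
	}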
822 * @mas: The maple state
828 static inline void *mas_slot_locked(struct ma_state *mas, void __rcu **slots,
831 return mt_slot_locked(mas->tree, slots, offset);
836 * @mas: The maple state
842 static inline void *mas_slot(struct ma_state *mas, void __rcu **slots,
845 return mt_slot(mas->tree, slots, offset);
850 * @mas: The maple state.
854 static inline void *mas_root(struct ma_state *mas)
856 return rcu_dereference_check(mas->tree->ma_root, mt_locked(mas->tree));
866 * @mas: The maple state.
870 static inline void *mas_root_locked(struct ma_state *mas)
872 return mt_root_locked(mas->tree);
1006 * @mas: the maple state
1011 static void mas_mat_destroy(struct ma_state *mas, struct ma_topiary *mat)
1015 bool in_rcu = mt_in_rcu(mas->tree);
1020 mt_destroy_walk(mat->head, mas->tree, !in_rcu);
1028 * @mas: the maple state.
1032 static inline void mas_descend(struct ma_state *mas)
1039 node = mas_mn(mas);
1040 type = mte_node_type(mas->node);
1044 if (mas->offset)
1045 mas->min = pivots[mas->offset - 1] + 1;
1046 mas->max = mas_safe_pivot(mas, pivots, mas->offset, type);
1047 mas->node = mas_slot(mas, slots, mas->offset);
1070 * @mas: The maple state
1072 * Sets the @mas->max and @mas->min to the correct values when walking up. This
1077 static int mas_ascend(struct ma_state *mas)
1089 a_node = mas_mn(mas);
1091 mas->offset = 0;
1095 p_node = mte_parent(mas->node);
1099 a_type = mas_parent_type(mas, mas->node);
1100 mas->offset = mte_parent_slot(mas->node);
1104 if (p_node != mte_parent(mas->node))
1107 mas->node = a_enode;
1110 mas->max = ULONG_MAX;
1111 mas->min = 0;
1115 if (!mas->min)
1118 if (mas->max == ULONG_MAX)
1125 a_type = mas_parent_type(mas, p_enode);
1152 mas->max = max;
1153 mas->min = min;
1159 * @mas: The maple state
1163 static inline struct maple_node *mas_pop_node(struct ma_state *mas)
1165 struct maple_alloc *ret, *node = mas->alloc;
1166 unsigned long total = mas_allocated(mas);
1167 unsigned int req = mas_alloc_req(mas);
1175 mas->alloc = NULL;
1182 mas->alloc = node->slot[0];
1183 mas->alloc->total = node->total - 1;
1195 mas_set_alloc_req(mas, req);
1204 * @mas: The maple state
1207 * Stores the maple node back into @mas->alloc for reuse. Updates allocated and
1210 static inline void mas_push_node(struct ma_state *mas, struct maple_node *used)
1213 struct maple_alloc *head = mas->alloc;
1215 unsigned int requested = mas_alloc_req(mas);
1217 count = mas_allocated(mas);
1234 mas->alloc = reuse;
1237 mas_set_alloc_req(mas, requested - 1);
1242 * @mas: The maple state
1245 static inline void mas_alloc_nodes(struct ma_state *mas, gfp_t gfp)
1248 unsigned long allocated = mas_allocated(mas);
1249 unsigned int requested = mas_alloc_req(mas);
1257 mas_set_alloc_req(mas, 0);
1258 if (mas->mas_flags & MA_STATE_PREALLOC) {
1264 if (!allocated || mas->alloc->node_count == MAPLE_ALLOC_SLOTS) {
1270 node->slot[0] = mas->alloc;
1276 mas->alloc = node;
1281 node = mas->alloc;
1301 mas->alloc->total = allocated;
1308 mas_set_alloc_req(mas, requested);
1309 if (mas->alloc && !(((unsigned long)mas->alloc & 0x1)))
1310 mas->alloc->total = allocated;
1311 mas_set_err(mas, -ENOMEM);
1316 * @mas: The maple state
1322 static inline void mas_free(struct ma_state *mas, struct maple_enode *used)
1326 if (mt_in_rcu(mas->tree))
1329 mas_push_node(mas, tmp);
1335 * @mas: The maple state
1339 static void mas_node_count_gfp(struct ma_state *mas, int count, gfp_t gfp)
1341 unsigned long allocated = mas_allocated(mas);
1344 mas_set_alloc_req(mas, count - allocated);
1345 mas_alloc_nodes(mas, gfp);
1352 * @mas: The maple state
1357 static void mas_node_count(struct ma_state *mas, int count)
1359 return mas_node_count_gfp(mas, count, GFP_NOWAIT | __GFP_NOWARN);
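mas_node_count() is the usual preamble before a structural change: request enough nodes (GFP_NOWAIT by default, per the line above), check the latched error, then pop them infallibly. A hedged caller sketch mirroring mas_destroy_rebalance() further down in this listing, with newnode assumed declared:

	mas_node_count(mas, 3);		/* make sure three nodes are reserved */
	if (mas_is_err(mas))		/* -ENOMEM was latched in mas->node */
		return;
	newnode = mas_pop_node(mas);	/* cannot fail after the check above */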
1364 * @mas: The maple state.
1366 * If mas->node == MAS_START, then set the min, max and depth to
1370 * - If mas->node is an error or not MAS_START, return NULL.
1371 * - If it's an empty tree: NULL & mas->node == MAS_NONE
1372 * - If it's a single entry: The entry & mas->node == MAS_ROOT
1373 * - If it's a tree: NULL & mas->node == safe root node.
1375 static inline struct maple_enode *mas_start(struct ma_state *mas)
1377 if (likely(mas_is_start(mas))) {
1380 mas->min = 0;
1381 mas->max = ULONG_MAX;
1384 mas->depth = 0;
1385 root = mas_root(mas);
1388 mas->depth = 1;
1389 mas->node = mte_safe_root(root);
1390 mas->offset = 0;
1391 if (mte_dead_node(mas->node))
1399 mas->node = MAS_NONE;
1400 mas->offset = MAPLE_NODE_SLOTS;
1405 mas->node = MAS_ROOT;
1406 mas->offset = MAPLE_NODE_SLOTS;
1409 if (mas->index > 0)
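Callers of mas_start() typically dispatch on the three outcomes listed in its comment. A minimal sketch, mirroring the mtree_load() lines near the end of this listing:

	entry = mas_start(&mas);
	if (unlikely(mas_is_none(&mas)))	/* empty tree */
		return NULL;
	if (unlikely(mas_is_ptr(&mas)))		/* single entry at index 0 */
		return mas.index ? NULL : entry;
	/* otherwise mas.node is the safe root: descend from here */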
1453 * @mas: the maple state
1460 static inline unsigned char mas_data_end(struct ma_state *mas)
1467 type = mte_node_type(mas->node);
1468 node = mas_mn(mas);
1480 if (likely(pivots[offset] == mas->max))
1488 * @mas: the maple state
1492 static unsigned long mas_leaf_max_gap(struct ma_state *mas)
1502 mt = mte_node_type(mas->node);
1503 mn = mas_mn(mas);
1528 max_gap = pivots[0] - mas->min + 1;
1535 max_piv = ma_data_end(mn, mt, pivots, mas->max) - 1;
1540 if (unlikely(mas->max == ULONG_MAX) && !slots[max_piv + 1]) {
1594 * @mas: The maple state.
1598 static inline unsigned long mas_max_gap(struct ma_state *mas)
1605 mt = mte_node_type(mas->node);
1607 return mas_leaf_max_gap(mas);
1609 node = mas_mn(mas);
1610 MAS_BUG_ON(mas, mt != maple_arange_64);
1618 * @mas: The maple state
1625 static inline void mas_parent_gap(struct ma_state *mas, unsigned char offset,
1635 pnode = mte_parent(mas->node);
1636 pmt = mas_parent_type(mas, mas->node);
1641 MAS_BUG_ON(mas, pmt != maple_arange_64);
1665 pmt = mas_parent_type(mas, penode);
1674 * @mas: the maple state.
1676 static inline void mas_update_gap(struct ma_state *mas)
1682 if (!mt_is_alloc(mas->tree))
1685 if (mte_is_root(mas->node))
1688 max_gap = mas_max_gap(mas);
1690 pslot = mte_parent_slot(mas->node);
1691 p_gap = ma_gaps(mte_parent(mas->node),
1692 mas_parent_type(mas, mas->node))[pslot];
1695 mas_parent_gap(mas, pslot, max_gap);
1701 * @mas: the maple state (for the tree)
1704 static inline void mas_adopt_children(struct ma_state *mas,
1714 offset = ma_data_end(node, type, pivots, mas->max);
1716 child = mas_slot_locked(mas, slots, offset);
1717 mas_set_parent(mas, child, parent, offset);
1724 * @mas: the maple state with the new node
1727 static inline void mas_put_in_tree(struct ma_state *mas,
1729 __must_hold(mas->tree->ma_lock)
1734 if (mte_is_root(mas->node)) {
1735 mas_mn(mas)->parent = ma_parent_ptr(mas_tree_parent(mas));
1736 rcu_assign_pointer(mas->tree->ma_root, mte_mk_root(mas->node));
1737 mas_set_height(mas);
1740 offset = mte_parent_slot(mas->node);
1741 slots = ma_slots(mte_parent(mas->node),
1742 mas_parent_type(mas, mas->node));
1743 rcu_assign_pointer(slots[offset], mas->node);
1753 * @mas: the ma_state with @mas->node pointing to the new node.
1756 static inline void mas_replace_node(struct ma_state *mas,
1758 __must_hold(mas->tree->ma_lock)
1760 mas_put_in_tree(mas, old_enode);
1761 mas_free(mas, old_enode);
1765 * mas_find_child() - Find a child whose parent is @mas->node.
1766 * @mas: the maple state with the parent.
1769 static inline bool mas_find_child(struct ma_state *mas, struct ma_state *child)
1770 __must_hold(mas->tree->ma_lock)
1780 mt = mte_node_type(mas->node);
1781 node = mas_mn(mas);
1784 end = ma_data_end(node, mt, pivots, mas->max);
1785 for (offset = mas->offset; offset <= end; offset++) {
1786 entry = mas_slot_locked(mas, slots, offset);
1788 *child = *mas;
1789 mas->offset = offset + 1;
1872 static inline int mab_calc_split(struct ma_state *mas,
1885 if (unlikely((mas->mas_flags & MA_STATE_BULK))) {
1892 mas->mas_flags |= MA_STATE_REBALANCE;
1936 * @mas: The maple state
1942 static inline void mas_mab_cp(struct ma_state *mas, unsigned char mas_start,
1953 node = mas_mn(mas);
1954 mt = mte_node_type(mas->node);
1969 if (unlikely(mas->max == b_node->pivot[j]))
1974 b_node->pivot[j] = mas_safe_pivot(mas, pivots, i, mt);
1981 if (!ma_is_leaf(mt) && mt_is_alloc(mas->tree)) {
1990 * @mas: The maple state
2000 static inline void mas_leaf_set_meta(struct ma_state *mas,
2008 if (pivots[end] && pivots[end] < mas->max)
2020 * @mas: The maple state with the maple encoded node.
2024 struct ma_state *mas, bool new_max)
2027 enum maple_type mt = mte_node_type(mas->node);
2028 struct maple_node *node = mte_to_node(mas->node);
2049 mas->max = b_node->pivot[i - 1];
2052 if (likely(!ma_is_leaf(mt) && mt_is_alloc(mas->tree))) {
2067 mas_leaf_set_meta(mas, node, pivots, mt, end);
2073 * @mas: The maple state
2077 static inline void mas_bulk_rebalance(struct ma_state *mas, unsigned char end,
2080 if (!(mas->mas_flags & MA_STATE_BULK))
2083 if (mte_is_root(mas->node))
2087 mas->mas_flags &= ~MA_STATE_REBALANCE;
2108 struct ma_state *mas = wr_mas->mas;
2112 slot = mas->offset;
2115 mas_mab_cp(mas, 0, slot - 1, b_node, 0);
2119 piv = mas->min - 1;
2121 if (piv + 1 < mas->index) {
2125 b_node->gap[b_end] = mas->index - 1 - piv;
2126 b_node->pivot[b_end++] = mas->index - 1;
2130 mas->offset = b_end;
2132 b_node->pivot[b_end] = mas->last;
2135 if (mas->last >= mas->max)
2139 piv = mas_safe_pivot(mas, wr_mas->pivots, offset_end, wr_mas->type);
2140 if (piv > mas->last) {
2142 mas_bulk_rebalance(mas, b_node->b_end, wr_mas->type);
2145 wr_mas->content = mas_slot_locked(mas, wr_mas->slots,
2150 b_node->gap[b_end] = piv - mas->last + 1;
2159 mas_mab_cp(mas, slot, wr_mas->node_end + 1, b_node, ++b_end);
2169 * @mas: the maple state
2173 static inline bool mas_prev_sibling(struct ma_state *mas)
2175 unsigned int p_slot = mte_parent_slot(mas->node);
2177 if (mte_is_root(mas->node))
2183 mas_ascend(mas);
2184 mas->offset = p_slot - 1;
2185 mas_descend(mas);
2191 * @mas: the maple state
2195 static inline bool mas_next_sibling(struct ma_state *mas)
2197 MA_STATE(parent, mas->tree, mas->index, mas->last);
2199 if (mte_is_root(mas->node))
2202 parent = *mas;
2204 parent.offset = mte_parent_slot(mas->node) + 1;
2208 *mas = parent;
2209 mas_descend(mas);
2230 * mas_wr_node_walk() - Find the correct offset for the index in the @mas.
2237 struct ma_state *mas = wr_mas->mas;
2241 wr_mas->r_max = wr_mas->r_min = mas->index;
2242 mas->offset = mas->index = mas->min;
2246 wr_mas->node = mas_mn(wr_mas->mas);
2249 wr_mas->pivots, mas->max);
2250 offset = mas->offset;
2252 while (offset < count && mas->index > wr_mas->pivots[offset])
2255 wr_mas->r_max = offset < count ? wr_mas->pivots[offset] : mas->max;
2256 wr_mas->r_min = mas_safe_min(mas, wr_mas->pivots, offset);
2257 wr_mas->offset_end = mas->offset = offset;
2365 wr_mas.mas = mast->orig_l;
2374 * @mas: the maple state with the allocations.
2383 *mas_new_ma_node(struct ma_state *mas, struct maple_big_node *b_node)
2385 return mt_mk_node(ma_mnode_ptr(mas_pop_node(mas)), b_node->type);
2391 * @mas: the maple state that contains the allocations.
2400 static inline unsigned char mas_mab_to_node(struct ma_state *mas,
2408 *left = mas_new_ma_node(mas, b_node);
2416 split = mab_calc_split(mas, b_node, mid_split, min);
2417 *right = mas_new_ma_node(mas, b_node);
2421 *middle = mas_new_ma_node(mas, b_node);
2431 * @mas: the maple state to get the pivot (mas->max)
2435 struct ma_state *mas,
2442 if (mt_is_alloc(mas->tree))
2443 b_node->gap[b_node->b_end] = mas_max_gap(mas);
2444 b_node->pivot[b_node->b_end++] = mas->max;
2449 * of @mas->node to either @left or @right, depending on @slot and @split
2451 * @mas: the maple state with the node that needs a parent
2454 * @slot: the slot where mas->node was placed
2457 static inline void mas_set_split_parent(struct ma_state *mas,
2462 if (mas_is_none(mas))
2466 mas_set_parent(mas, mas->node, left, *slot);
2468 mas_set_parent(mas, mas->node, right, (*slot) - split - 1);
2539 * @mas: The maple state for pushing nodes
2545 static inline void mas_topiary_node(struct ma_state *mas,
2558 mas_push_node(mas, tmp);
2574 * @mas: The maple state pointing at the new data
2578 static inline void mas_topiary_replace(struct ma_state *mas,
2582 MA_TOPIARY(subtrees, mas->tree);
2587 mas_put_in_tree(mas, old_enode);
2590 tmp[0] = *mas;
2609 if (MAS_WARN_ON(mas, n == 0))
2621 return mas_free(mas, old_enode);
2623 tmp[0] = *mas;
2628 in_rcu = mt_in_rcu(mas->tree);
2649 if (MAS_WARN_ON(mas, n == 0))
2656 mas_topiary_node(mas, tmp[i].node, in_rcu);
2662 mas_topiary_node(mas, tmp[i].node, in_rcu);
2664 mas_mat_destroy(mas, &subtrees);
2669 * @mas: The maple state
2674 static inline void mas_wmb_replace(struct ma_state *mas,
2678 mas_topiary_replace(mas, old_enode);
2680 if (mte_is_leaf(mas->node))
2683 mas_update_gap(mas);
2783 static inline void *mtree_range_walk(struct ma_state *mas)
2795 next = mas->node;
2796 min = mas->min;
2797 max = mas->max;
2808 if (pivots[offset] >= mas->index) {
2817 } while ((offset < end) && (pivots[offset] < mas->index));
2827 next = mt_slot(mas->tree, slots, offset);
2832 mas->offset = offset;
2833 mas->index = min;
2834 mas->last = max;
2835 mas->min = prev_min;
2836 mas->max = prev_max;
2837 mas->node = last;
2841 mas_reset(mas);
2847 * @mas: The starting maple state
2854 * iteration. @mas is used to obtain allocations. orig_l_mas keeps track of the
2863 static int mas_spanning_rebalance(struct ma_state *mas,
2871 MA_STATE(l_mas, mas->tree, mas->index, mas->index);
2872 MA_STATE(r_mas, mas->tree, mas->index, mas->last);
2873 MA_STATE(m_mas, mas->tree, mas->index, mas->index);
2905 split = mas_mab_to_node(mas, mast->bn, &left, &right, &middle,
2951 l_mas.node = mt_mk_node(ma_mnode_ptr(mas_pop_node(mas)),
2955 mas_set_parent(mas, left, l_mas.node, slot);
2957 mas_set_parent(mas, middle, l_mas.node, ++slot);
2960 mas_set_parent(mas, right, l_mas.node, ++slot);
2964 mas_mn(mast->l)->parent = ma_parent_ptr(mas_tree_parent(mas));
2972 mas->depth = l_mas.depth;
2973 mas->node = l_mas.node;
2974 mas->min = l_mas.min;
2975 mas->max = l_mas.max;
2976 mas->offset = l_mas.offset;
2977 mas_wmb_replace(mas, old_enode);
2978 mtree_range_walk(mas);
2984 * @mas: The maple state
2992 static inline int mas_rebalance(struct ma_state *mas,
2995 char empty_count = mas_mt_height(mas);
2999 MA_STATE(l_mas, mas->tree, mas->index, mas->last);
3000 MA_STATE(r_mas, mas->tree, mas->index, mas->last);
3002 trace_ma_op(__func__, mas);
3013 mas_node_count(mas, empty_count * 2 - 1);
3014 if (mas_is_err(mas))
3020 mast.bn->type = mte_node_type(mas->node);
3022 l_mas = r_mas = *mas;
3031 mas->offset += shift;
3037 return mas_spanning_rebalance(mas, &mast, empty_count);
3043 * @mas: The maple state
3049 static inline void mas_destroy_rebalance(struct ma_state *mas, unsigned char end)
3051 enum maple_type mt = mte_node_type(mas->node);
3057 bool in_rcu = mt_in_rcu(mas->tree);
3059 MA_STATE(l_mas, mas->tree, mas->index, mas->last);
3061 l_mas = *mas;
3067 mas_node_count(mas, 3);
3068 if (mas_is_err(mas))
3071 newnode = mas_pop_node(mas);
3076 node = mas_mn(mas);
3094 mas->min = l_mas.max + 1;
3124 /* RCU requires replacing both l_mas, mas, and parent. */
3125 mas->node = mt_mk_node(newnode, mt);
3128 new_left = mas_pop_node(mas);
3139 offset = mte_parent_slot(mas->node);
3141 parent = mas_pop_node(mas);
3145 rcu_assign_pointer(slots[offset], mas->node);
3150 gap = mas_leaf_max_gap(mas);
3151 mte_set_gap(eparent, mte_parent_slot(mas->node), gap);
3154 mas_ascend(mas);
3157 mas_replace_node(mas, old_eparent);
3158 mas_adopt_children(mas, mas->node);
3161 mas_update_gap(mas);
3167 * @mas: The maple state
3171 struct ma_state *mas, int height)
3175 if (mte_is_root(mas->node)) {
3176 if (mt_is_alloc(mas->tree))
3180 mas->depth = height;
3186 ancestor = mas_new_ma_node(mas, mast->bn);
3187 mas_set_parent(mas, mast->l->node, ancestor, mast->l->offset);
3188 mas_set_parent(mas, mast->r->node, ancestor, mast->r->offset);
3189 mte_to_node(ancestor)->parent = mas_mn(mas)->parent;
3193 mas->offset = mast->bn->b_end - 1;
3200 * @mas: the maple state
3204 struct ma_state *mas,
3215 if (mte_is_root(mas->node)) {
3218 mas_ascend(mas);
3219 mas->offset = mte_parent_slot(mas->node);
3223 mas_mab_cp(mas, 0, mast->l->offset - 1, mast->bn, 0);
3229 if (mast->bn->pivot[mast->bn->b_end - 1] == mas->max)
3233 mas_mab_cp(mas, split + skip, mt_slot_count(mas->node) - 1,
3237 mast->bn->type = mte_node_type(mas->node);
3244 * @mas: The maple state
3248 struct ma_state *mas, unsigned char split)
3255 mast->l->offset = mte_parent_slot(mas->node);
3258 if (mte_is_leaf(mas->node))
3271 * @mas: The maple state
3280 static inline bool mas_push_data(struct ma_state *mas, int height,
3286 MA_STATE(tmp_mas, mas->tree, mas->index, mas->last);
3287 tmp_mas = *mas;
3297 space = 2 * mt_slot_count(mas->node) - 2;
3302 if (mas->max == ULONG_MAX)
3321 /* Switch mas to prev node */
3322 *mas = tmp_mas;
3336 mast_split_data(mast, mas, split);
3337 mast_fill_bnode(mast, mas, 2);
3338 mas_split_final_node(mast, mas, height + 1);
3344 * @mas: The maple state
3348 static int mas_split(struct ma_state *mas, struct maple_big_node *b_node)
3372 MA_STATE(l_mas, mas->tree, mas->index, mas->last);
3373 MA_STATE(r_mas, mas->tree, mas->index, mas->last);
3374 MA_STATE(prev_l_mas, mas->tree, mas->index, mas->last);
3375 MA_STATE(prev_r_mas, mas->tree, mas->index, mas->last);
3377 trace_ma_op(__func__, mas);
3378 mas->depth = mas_mt_height(mas);
3380 mas_node_count(mas, 1 + mas->depth * 2);
3381 if (mas_is_err(mas))
3390 while (height++ <= mas->depth) {
3392 mas_split_final_node(&mast, mas, height);
3396 l_mas = r_mas = *mas;
3397 l_mas.node = mas_new_ma_node(mas, b_node);
3398 r_mas.node = mas_new_ma_node(mas, b_node);
3407 if (mas_push_data(mas, height, &mast, true))
3411 if (mas_push_data(mas, height, &mast, false))
3414 split = mab_calc_split(mas, b_node, &mid_split, prev_l_mas.min);
3415 mast_split_data(&mast, mas, split);
3420 mast.r->max = mas->max;
3421 mast_fill_bnode(&mast, mas, 1);
3427 old = mas->node;
3428 mas->node = l_mas.node;
3429 mas_wmb_replace(mas, old);
3430 mtree_range_walk(mas);
3448 if (mt_in_rcu(wr_mas->mas->tree))
3457 mab_mas_cp(bn, 0, bn->b_end, wr_mas->mas, false);
3475 old_enode = wr_mas->mas->node;
3478 (mas_mt_height(wr_mas->mas) > 1))
3479 return mas_rebalance(wr_mas->mas, b_node);
3482 return mas_split(wr_mas->mas, b_node);
3487 mas_node_count(wr_mas->mas, 1);
3488 if (mas_is_err(wr_mas->mas))
3491 node = mas_pop_node(wr_mas->mas);
3492 node->parent = mas_mn(wr_mas->mas)->parent;
3493 wr_mas->mas->node = mt_mk_node(node, b_type);
3494 mab_mas_cp(b_node, 0, b_end, wr_mas->mas, false);
3495 mas_replace_node(wr_mas->mas, old_enode);
3497 mas_update_gap(wr_mas->mas);
3503 * @mas: The maple state
3506 static inline int mas_root_expand(struct ma_state *mas, void *entry)
3508 void *contents = mas_root_locked(mas);
3515 mas_node_count(mas, 1);
3516 if (unlikely(mas_is_err(mas)))
3519 node = mas_pop_node(mas);
3522 node->parent = ma_parent_ptr(mas_tree_parent(mas));
3523 mas->node = mt_mk_node(node, type);
3525 if (mas->index) {
3528 if (likely(mas->index > 1))
3531 pivots[slot++] = mas->index - 1;
3535 mas->offset = slot;
3536 pivots[slot] = mas->last;
3537 if (mas->last != ULONG_MAX)
3540 mas->depth = 1;
3541 mas_set_height(mas);
3544 rcu_assign_pointer(mas->tree->ma_root, mte_mk_root(mas->node));
3548 static inline void mas_store_root(struct ma_state *mas, void *entry)
3550 if (likely((mas->last != 0) || (mas->index != 0)))
3551 mas_root_expand(mas, entry);
3553 mas_root_expand(mas, entry);
3555 rcu_assign_pointer(mas->tree->ma_root, entry);
3556 mas->node = MAS_START;
3563 * @mas: The maple state
3576 unsigned long last = wr_mas->mas->last;
3585 max = wr_mas->mas->max;
3599 trace_ma_write(__func__, wr_mas->mas, wr_mas->r_max, entry);
3605 wr_mas->type = mte_node_type(wr_mas->mas->node);
3612 wr_mas->mas->max = wr_mas->r_max;
3613 wr_mas->mas->min = wr_mas->r_min;
3614 wr_mas->mas->node = wr_mas->content;
3615 wr_mas->mas->offset = 0;
3616 wr_mas->mas->depth++;
3628 struct ma_state *mas = wr_mas->mas;
3635 wr_mas->content = mas_slot_locked(mas, wr_mas->slots,
3636 mas->offset);
3648 struct ma_state *mas = wr_mas->mas;
3652 wr_mas->content = mas_slot_locked(mas, wr_mas->slots,
3653 mas->offset);
3669 struct ma_state *r_mas = r_wr_mas->mas;
3670 struct ma_state *l_mas = l_wr_mas->mas;
3701 static inline void *mas_state_walk(struct ma_state *mas)
3705 entry = mas_start(mas);
3706 if (mas_is_none(mas))
3709 if (mas_is_ptr(mas))
3712 return mtree_range_walk(mas);
3719 * @mas: The maple state.
3721 * Note: Leaves mas in an undesirable state.
3722 * Return: The entry for @mas->index or %NULL on dead node.
3724 static inline void *mtree_lookup_walk(struct ma_state *mas)
3735 next = mas->node;
3746 if (pivots[offset] >= mas->index) {
3753 next = mt_slot(mas->tree, slots, offset);
3761 mas_reset(mas);
3769 * @mas: The maple state
3776 static inline int mas_new_root(struct ma_state *mas, void *entry)
3778 struct maple_enode *root = mas_root_locked(mas);
3784 if (!entry && !mas->index && mas->last == ULONG_MAX) {
3785 mas->depth = 0;
3786 mas_set_height(mas);
3787 rcu_assign_pointer(mas->tree->ma_root, entry);
3788 mas->node = MAS_START;
3792 mas_node_count(mas, 1);
3793 if (mas_is_err(mas))
3796 node = mas_pop_node(mas);
3799 node->parent = ma_parent_ptr(mas_tree_parent(mas));
3800 mas->node = mt_mk_node(node, type);
3802 pivots[0] = mas->last;
3803 mas->depth = 1;
3804 mas_set_height(mas);
3805 rcu_assign_pointer(mas->tree->ma_root, mte_mk_root(mas->node));
3809 mte_destroy_walk(root, mas->tree);
3816 * Note that mas is expected to point to the node which caused the store to
3826 struct ma_state *mas;
3847 mas = wr_mas->mas;
3848 trace_ma_op(__func__, mas);
3850 if (unlikely(!mas->index && mas->last == ULONG_MAX))
3851 return mas_new_root(mas, wr_mas->entry);
3856 height = mas_mt_height(mas);
3857 mas_node_count(mas, 1 + height * 3);
3858 if (mas_is_err(mas))
3866 r_mas = *mas;
3873 r_mas.last = r_mas.index = mas->last;
3876 l_mas = *mas;
3881 mas->offset = l_mas.offset;
3882 mas->index = l_mas.index;
3883 mas->last = l_mas.last = r_mas.last;
3888 mas_set_range(mas, 0, ULONG_MAX);
3889 return mas_new_root(mas, wr_mas->entry);
3903 l_mas.index = l_mas.last = mas->index;
3909 return mas_spanning_rebalance(mas, &mast, height + 1);
3923 struct ma_state *mas = wr_mas->mas;
3929 bool in_rcu = mt_in_rcu(mas->tree);
3932 if (!mte_is_root(mas->node) && (new_end <= mt_min_slots[wr_mas->type]) &&
3933 !(mas->mas_flags & MA_STATE_BULK))
3936 if (mas->last == wr_mas->end_piv)
3939 mas_bulk_rebalance(mas, wr_mas->node_end, wr_mas->type);
3943 mas_node_count(mas, 1);
3944 if (mas_is_err(mas))
3947 newnode = mas_pop_node(mas);
3953 newnode->parent = mas_mn(mas)->parent;
3957 memcpy(dst_pivots, wr_mas->pivots, sizeof(unsigned long) * mas->offset);
3958 memcpy(dst_slots, wr_mas->slots, sizeof(void *) * mas->offset);
3961 if (wr_mas->r_min < mas->index) {
3962 rcu_assign_pointer(dst_slots[mas->offset], wr_mas->content);
3963 dst_pivots[mas->offset++] = mas->index - 1;
3967 if (mas->offset < node_pivots)
3968 dst_pivots[mas->offset] = mas->last;
3969 rcu_assign_pointer(dst_slots[mas->offset], wr_mas->entry);
3978 dst_offset = mas->offset + 1;
3987 dst_pivots[new_end] = mas->max;
3990 mas_leaf_set_meta(mas, newnode, dst_pivots, maple_leaf_64, new_end);
3992 struct maple_enode *old_enode = mas->node;
3994 mas->node = mt_mk_node(newnode, wr_mas->type);
3995 mas_replace_node(mas, old_enode);
3999 trace_ma_write(__func__, mas, 0, wr_mas->entry);
4000 mas_update_gap(mas);
4012 struct ma_state *mas = wr_mas->mas;
4013 unsigned char offset = mas->offset;
4017 gap |= !mt_slot_locked(mas->tree, slots, offset);
4018 gap |= !mt_slot_locked(mas->tree, slots, offset + 1);
4021 if (mas->index == wr_mas->r_min) {
4024 wr_mas->pivots[offset] = mas->last;
4028 wr_mas->pivots[offset] = mas->index - 1;
4029 mas->offset++; /* Keep mas accurate. */
4031 } else if (!mt_in_rcu(mas->tree)) {
4036 gap |= !mt_slot_locked(mas->tree, slots, offset + 2);
4038 wr_mas->pivots[offset] = mas->index - 1;
4039 wr_mas->pivots[offset + 1] = mas->last;
4040 mas->offset++; /* Keep mas accurate. */
4045 trace_ma_write(__func__, mas, 0, wr_mas->entry);
4051 mas_update_gap(mas);
4058 struct ma_state *mas = wr_mas->mas;
4062 mas->last = wr_mas->end_piv;
4065 if ((mas->last == wr_mas->end_piv) &&
4070 mas->last = mas->max;
4072 mas->last = wr_mas->pivots[wr_mas->offset_end];
4073 wr_mas->end_piv = mas->last;
4079 mas->index = wr_mas->r_min;
4082 if (mas->index == wr_mas->r_min && mas->offset &&
4083 !wr_mas->slots[mas->offset - 1]) {
4084 mas->offset--;
4085 wr_mas->r_min = mas->index =
4086 mas_safe_min(mas, wr_mas->pivots, mas->offset);
4087 wr_mas->r_max = wr_mas->pivots[mas->offset];
4095 (wr_mas->mas->last > wr_mas->pivots[wr_mas->offset_end]))
4101 wr_mas->end_piv = wr_mas->mas->max;
4109 struct ma_state *mas = wr_mas->mas;
4112 new_end -= wr_mas->offset_end - mas->offset;
4113 if (wr_mas->r_min == mas->index)
4116 if (wr_mas->end_piv == mas->last)
4136 struct ma_state *mas;
4140 mas = wr_mas->mas;
4141 if (mt_in_rcu(mas->tree))
4144 if (mas->offset != wr_mas->node_end)
4148 if (mas->offset != end)
4158 if (mas->last == wr_mas->r_max) {
4161 wr_mas->pivots[end] = mas->index - 1;
4162 mas->offset = new_end;
4166 wr_mas->pivots[end] = mas->last;
4172 wr_mas->pivots[end + 1] = mas->last;
4174 wr_mas->pivots[end] = mas->index - 1;
4175 mas->offset = end + 1;
4179 mas_update_gap(mas);
4181 trace_ma_write(__func__, mas, new_end, wr_mas->entry);
4195 trace_ma_write(__func__, wr_mas->mas, 0, wr_mas->entry);
4203 struct ma_state *mas = wr_mas->mas;
4207 if (wr_mas->r_min == mas->index && wr_mas->r_max == mas->last) {
4208 rcu_assign_pointer(wr_mas->slots[mas->offset], wr_mas->entry);
4210 mas_update_gap(mas);
4232 if (mas_is_err(mas))
4241 * @mas: The maple state
4248 struct ma_state *mas = wr_mas->mas;
4250 wr_mas->content = mas_start(mas);
4251 if (mas_is_none(mas) || mas_is_ptr(mas)) {
4252 mas_store_root(mas, wr_mas->entry);
4264 if (unlikely(!mas->index && mas->last == ULONG_MAX)) {
4265 mas_new_root(mas, wr_mas->entry);
4275 * @mas: The maple state
4281 static inline void *mas_insert(struct ma_state *mas, void *entry)
4283 MA_WR_STATE(wr_mas, mas, entry);
4299 wr_mas.content = mas_start(mas);
4303 if (mas_is_none(mas) || mas_is_ptr(mas)) {
4304 mas_store_root(mas, entry);
4313 wr_mas.offset_end = mas->offset;
4316 if (wr_mas.content || (mas->last > wr_mas.r_max))
4326 mas_set_err(mas, -EEXIST);
4331 static inline void mas_rewalk(struct ma_state *mas, unsigned long index)
4334 mas_set(mas, index);
4335 mas_state_walk(mas);
4336 if (mas_is_start(mas))
4340 static inline bool mas_rewalk_if_dead(struct ma_state *mas,
4344 mas_rewalk(mas, index);
4352 * tree. The prev value will be mas->node[mas->offset] or MAS_NONE.
4353 * @mas: The maple state
4359 static inline int mas_prev_node(struct ma_state *mas, unsigned long min)
4368 node = mas_mn(mas);
4369 if (!mas->min)
4372 max = mas->min - 1;
4382 if (unlikely(mas_ascend(mas)))
4384 offset = mas->offset;
4386 node = mas_mn(mas);
4390 mt = mte_node_type(mas->node);
4394 mas->node = mas_slot(mas, slots, offset);
4398 mt = mte_node_type(mas->node);
4399 node = mas_mn(mas);
4407 mas->node = mas_slot(mas, slots, offset);
4413 mas->min = pivots[offset - 1] + 1;
4414 mas->max = max;
4415 mas->offset = mas_data_end(mas);
4416 if (unlikely(mte_dead_node(mas->node)))
4425 mas->node = MAS_NONE;
4432 * @mas: The maple state
4435 * @set_underflow: Set @mas->node to the underflow state when the limit is reached.
4439 static void *mas_prev_slot(struct ma_state *mas, unsigned long min, bool empty,
4448 unsigned long save_point = mas->index;
4451 node = mas_mn(mas);
4452 type = mte_node_type(mas->node);
4454 if (unlikely(mas_rewalk_if_dead(mas, node, save_point)))
4457 if (mas->min <= min) {
4458 pivot = mas_safe_min(mas, pivots, mas->offset);
4460 if (unlikely(mas_rewalk_if_dead(mas, node, save_point)))
4468 if (likely(mas->offset)) {
4469 mas->offset--;
4470 mas->last = mas->index - 1;
4471 mas->index = mas_safe_min(mas, pivots, mas->offset);
4473 if (mas_prev_node(mas, min)) {
4474 mas_rewalk(mas, save_point);
4478 if (mas_is_none(mas))
4481 mas->last = mas->max;
4482 node = mas_mn(mas);
4483 type = mte_node_type(mas->node);
4485 mas->index = pivots[mas->offset - 1] + 1;
4489 entry = mas_slot(mas, slots, mas->offset);
4490 if (unlikely(mas_rewalk_if_dead(mas, node, save_point)))
4497 if (mas->index <= min)
4507 mas->node = MAS_UNDERFLOW;
4513 * @mas: The maple state
4516 * The next value will be mas->node[mas->offset] or MAS_NONE.
4519 static inline int mas_next_node(struct ma_state *mas, struct maple_node *node,
4530 if (mas->max >= max)
4533 min = mas->max + 1;
4540 if (unlikely(mas_ascend(mas)))
4544 node = mas_mn(mas);
4545 mt = mte_node_type(mas->node);
4547 node_end = ma_data_end(node, mt, pivots, mas->max);
4551 } while (unlikely(mas->offset == node_end));
4554 mas->offset++;
4555 enode = mas_slot(mas, slots, mas->offset);
4560 mas->offset = 0;
4564 mas->node = enode;
4565 node = mas_mn(mas);
4566 mt = mte_node_type(mas->node);
4568 enode = mas_slot(mas, slots, 0);
4573 if (!mas->offset)
4576 mas->max = mas_safe_pivot(mas, pivots, mas->offset, mt);
4580 mas->node = enode;
4581 mas->min = min;
4588 mas->node = MAS_NONE;
4595 * @mas: The maple state
4598 * @set_overflow: Should @mas->node be set to overflow when the limit is
4603 static void *mas_next_slot(struct ma_state *mas, unsigned long max, bool empty,
4612 unsigned long save_point = mas->last;
4616 node = mas_mn(mas);
4617 type = mte_node_type(mas->node);
4619 data_end = ma_data_end(node, type, pivots, mas->max);
4620 if (unlikely(mas_rewalk_if_dead(mas, node, save_point)))
4623 if (mas->max >= max) {
4624 if (likely(mas->offset < data_end))
4625 pivot = pivots[mas->offset];
4629 if (unlikely(mas_rewalk_if_dead(mas, node, save_point)))
4636 if (likely(mas->offset < data_end)) {
4637 mas->index = pivots[mas->offset] + 1;
4639 mas->offset++;
4640 if (likely(mas->offset < data_end))
4641 mas->last = pivots[mas->offset];
4643 mas->last = mas->max;
4645 if (mas_next_node(mas, node, max)) {
4646 mas_rewalk(mas, save_point);
4650 if (WARN_ON_ONCE(mas_is_none(mas))) {
4651 mas->node = MAS_OVERFLOW;
4656 mas->offset = 0;
4657 mas->index = mas->min;
4658 node = mas_mn(mas);
4659 type = mte_node_type(mas->node);
4661 mas->last = pivots[0];
4665 entry = mt_slot(mas->tree, slots, mas->offset);
4666 if (unlikely(mas_rewalk_if_dead(mas, node, save_point)))
4673 if (mas->last >= max)
4676 mas->index = mas->last + 1;
4685 mas->node = MAS_OVERFLOW;
4691 * @mas: The maple state
4694 * Set the @mas->node to the next entry and the range_start to
4696 * Sets @mas->index and @mas->last to the range. Does not update @mas->index and
4697 * @mas->last on overflow.
4702 static inline void *mas_next_entry(struct ma_state *mas, unsigned long limit)
4704 if (mas->last >= limit) {
4705 mas->node = MAS_OVERFLOW;
4709 return mas_next_slot(mas, limit, false, true);
4715 * @mas: The maple state
4721 static bool mas_rev_awalk(struct ma_state *mas, unsigned long size,
4724 enum maple_type type = mte_node_type(mas->node);
4725 struct maple_node *node = mas_mn(mas);
4732 if (unlikely(mas_is_err(mas)))
4737 mas->offset = (unsigned char)(mas->index - mas->min);
4744 offset = mas->offset;
4745 min = mas_safe_min(mas, pivots, offset);
4747 while (mas->last < min)
4748 min = mas_safe_min(mas, pivots, --offset);
4750 max = mas_safe_pivot(mas, pivots, offset, type);
4751 while (mas->index <= max) {
4755 else if (!mas_slot(mas, slots, offset))
4759 if ((size <= gap) && (size <= mas->last - min + 1))
4769 min = mas_safe_min(mas, pivots, offset);
4779 min = mas_safe_min(mas, pivots, offset);
4782 if (unlikely((mas->index > max) || (size - 1 > max - mas->index)))
4786 mas->offset = offset;
4793 mas->node = mas_slot(mas, slots, offset);
4794 mas->min = min;
4795 mas->max = max;
4796 mas->offset = mas_data_end(mas);
4800 if (!mte_is_root(mas->node))
4804 mas_set_err(mas, -EBUSY);
4808 static inline bool mas_anode_descend(struct ma_state *mas, unsigned long size)
4810 enum maple_type type = mte_node_type(mas->node);
4819 mas->offset = (unsigned char)(mas->index - mas->min);
4823 node = mas_mn(mas);
4827 offset = mas->offset;
4828 min = mas_safe_min(mas, pivots, offset);
4829 data_end = ma_data_end(node, type, pivots, mas->max);
4831 pivot = mas_safe_pivot(mas, pivots, offset, type);
4834 if (mas->index > pivot)
4839 else if (!mas_slot(mas, slots, offset))
4840 gap = min(pivot, mas->last) - max(mas->index, min) + 1;
4849 if (mas->index <= pivot) {
4850 mas->node = mas_slot(mas, slots, offset);
4851 mas->min = min;
4852 mas->max = pivot;
4859 if (mas->last <= pivot) {
4860 mas_set_err(mas, -EBUSY);
4865 if (mte_is_root(mas->node))
4868 mas->offset = offset;
4873 * mas_walk() - Search for @mas->index in the tree.
4874 * @mas: The maple state.
4876 * mas->index and mas->last will be set to the range if there is a value. If
4877 * mas->node is MAS_NONE, reset to MAS_START.
4881 void *mas_walk(struct ma_state *mas)
4885 if (!mas_is_active(mas) || !mas_is_start(mas))
4886 mas->node = MAS_START;
4888 entry = mas_state_walk(mas);
4889 if (mas_is_start(mas)) {
4891 } else if (mas_is_none(mas)) {
4892 mas->index = 0;
4893 mas->last = ULONG_MAX;
4894 } else if (mas_is_ptr(mas)) {
4895 if (!mas->index) {
4896 mas->last = 0;
4900 mas->index = 1;
4901 mas->last = ULONG_MAX;
4902 mas->node = MAS_NONE;
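mas_walk() is the basic positioned lookup. A hedged usage sketch: find the range containing @index under RCU; on return, mas.index and mas.last bound the range that was found:

	#include <linux/maple_tree.h>

	/* Look up the entry covering @index, if any. */
	static void *lookup(struct maple_tree *mt, unsigned long index)
	{
		MA_STATE(mas, mt, index, index);
		void *entry;

		rcu_read_lock();
		entry = mas_walk(&mas);
		rcu_read_unlock();
		return entry;
	}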
4910 static inline bool mas_rewind_node(struct ma_state *mas)
4915 if (mte_is_root(mas->node)) {
4916 slot = mas->offset;
4920 mas_ascend(mas);
4921 slot = mas->offset;
4925 mas->offset = --slot;
4931 * @mas: The maple state.
4935 static inline bool mas_skip_node(struct ma_state *mas)
4937 if (mas_is_err(mas))
4941 if (mte_is_root(mas->node)) {
4942 if (mas->offset >= mas_data_end(mas)) {
4943 mas_set_err(mas, -EBUSY);
4947 mas_ascend(mas);
4949 } while (mas->offset >= mas_data_end(mas));
4951 mas->offset++;
4958 * @mas: The maple state
4961 * Search between @mas->index and @mas->last for a gap of @size.
4963 static inline void mas_awalk(struct ma_state *mas, unsigned long size)
4974 while (!mas_is_err(mas) && !mas_anode_descend(mas, size)) {
4975 if (last == mas->node)
4976 mas_skip_node(mas);
4978 last = mas->node;
4985 * @mas: The maple state
4991 static inline int mas_sparse_area(struct ma_state *mas, unsigned long min,
4994 if (!unlikely(mas_is_none(mas)) && min == 0) {
5006 mas->index = min;
5007 mas->last = min + size - 1;
5009 mas->last = max;
5010 mas->index = max - size + 1;
5018 * @mas: The maple state
5023 int mas_empty_area(struct ma_state *mas, unsigned long min,
5036 if (mas_is_start(mas))
5037 mas_start(mas);
5038 else if (mas->offset >= 2)
5039 mas->offset -= 2;
5040 else if (!mas_skip_node(mas))
5044 if (mas_is_none(mas) || mas_is_ptr(mas))
5045 return mas_sparse_area(mas, min, max, size, true);
5048 mas->index = min;
5049 mas->last = max;
5050 mas_awalk(mas, size);
5052 if (unlikely(mas_is_err(mas)))
5053 return xa_err(mas->node);
5055 offset = mas->offset;
5059 mt = mte_node_type(mas->node);
5060 pivots = ma_pivots(mas_mn(mas), mt);
5061 min = mas_safe_min(mas, pivots, offset);
5062 if (mas->index < min)
5063 mas->index = min;
5064 mas->last = mas->index + size - 1;
5072 * @mas: The maple state
5077 int mas_empty_area_rev(struct ma_state *mas, unsigned long min,
5080 struct maple_enode *last = mas->node;
5088 if (mas_is_start(mas)) {
5089 mas_start(mas);
5090 mas->offset = mas_data_end(mas);
5091 } else if (mas->offset >= 2) {
5092 mas->offset -= 2;
5093 } else if (!mas_rewind_node(mas)) {
5098 if (mas_is_none(mas) || mas_is_ptr(mas))
5099 return mas_sparse_area(mas, min, max, size, false);
5102 mas->index = min;
5103 mas->last = max;
5105 while (!mas_rev_awalk(mas, size, &min, &max)) {
5106 if (last == mas->node) {
5107 if (!mas_rewind_node(mas))
5110 last = mas->node;
5114 if (mas_is_err(mas))
5115 return xa_err(mas->node);
5117 if (unlikely(mas->offset == MAPLE_NODE_SLOTS))
5121 if (max < mas->last)
5122 mas->last = max;
5124 mas->index = mas->last - size + 1;
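Both gap searches communicate their result through the state: on success, mas.index and mas.last delimit the found span; on failure, the latched error is -EBUSY. A hedged caller sketch, with ret and first assumed declared (mtree_alloc_range(), later in this listing, wraps the same sequence):

	mtree_lock(mt);
	ret = mas_empty_area(&mas, min, max, size);	/* or mas_empty_area_rev() */
	if (!ret) {
		/* mas.index is the lowest fit; mas.last = mas.index + size - 1 */
		first = mas.index;
	}
	mtree_unlock(mt);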
5131 * @mas: The maple state
5342 if (!mas_is_active(wr_mas->mas)) {
5343 if (mas_is_start(wr_mas->mas))
5346 if (unlikely(mas_is_paused(wr_mas->mas)))
5349 if (unlikely(mas_is_none(wr_mas->mas)))
5352 if (unlikely(mas_is_overflow(wr_mas->mas)))
5355 if (unlikely(mas_is_underflow(wr_mas->mas)))
5364 if (wr_mas->mas->last > wr_mas->mas->max)
5370 if (mte_is_leaf(wr_mas->mas->node) &&
5371 wr_mas->mas->last == wr_mas->mas->max)
5377 mas_reset(wr_mas->mas);
5384 * @mas: The maple state.
5387 * The @mas->index and @mas->last are used to set the range for the @entry.
5388 * Note: The @mas should have pre-allocated entries to ensure there is memory to
5391 * Return: the first entry between mas->index and mas->last or %NULL.
5393 void *mas_store(struct ma_state *mas, void *entry)
5395 MA_WR_STATE(wr_mas, mas, entry);
5397 trace_ma_write(__func__, mas, 0, entry);
5399 if (MAS_WARN_ON(mas, mas->index > mas->last))
5400 pr_err("Error %lX > %lX %p\n", mas->index, mas->last, entry);
5402 if (mas->index > mas->last) {
5403 mas_set_err(mas, -EINVAL);
5423 * @mas: The maple state
5430 int mas_store_gfp(struct ma_state *mas, void *entry, gfp_t gfp)
5432 MA_WR_STATE(wr_mas, mas, entry);
5435 trace_ma_write(__func__, mas, 0, entry);
5438 if (unlikely(mas_nomem(mas, gfp)))
5441 if (unlikely(mas_is_err(mas)))
5442 return xa_err(mas->node);
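mas_store_gfp() wraps the store in the standard retry loop: on -ENOMEM, mas_nomem() drops the lock (when the gfp allows blocking), allocates, and signals a retry. A minimal sketch of a complete caller:

	#include <linux/maple_tree.h>

	/* Store @entry over [first, last]; returns 0 or a negative errno. */
	static int store_range(struct maple_tree *mt, unsigned long first,
			       unsigned long last, void *entry)
	{
		MA_STATE(mas, mt, first, last);
		int err;

		mtree_lock(mt);
		err = mas_store_gfp(&mas, entry, GFP_KERNEL);
		mtree_unlock(mt);
		return err;
	}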
5451 * @mas: The maple state
5454 void mas_store_prealloc(struct ma_state *mas, void *entry)
5456 MA_WR_STATE(wr_mas, mas, entry);
5459 trace_ma_write(__func__, mas, 0, entry);
5461 MAS_WR_BUG_ON(&wr_mas, mas_is_err(mas));
5462 mas_destroy(mas);
5468 * @mas: The maple state
5474 int mas_preallocate(struct ma_state *mas, void *entry, gfp_t gfp)
5476 MA_WR_STATE(wr_mas, mas, entry);
5482 if (unlikely(!mas->index && mas->last == ULONG_MAX))
5486 wr_mas.content = mas_start(mas);
5488 if (unlikely(mas_is_none(mas) || mas_is_ptr(mas)))
5493 request = 1 + mas_mt_height(mas) * 3;
5499 if (wr_mas.r_min == mas->index && wr_mas.r_max == mas->last)
5508 if (!mt_in_rcu(mas->tree))
5511 if (wr_mas.offset_end - mas->offset == 1)
5517 request = 1 + mas_mt_height(mas) * 2;
5522 if (unlikely(mte_is_root(mas->node)))
5527 request = mas_mt_height(mas) * 2 - 1;
5531 mas_node_count_gfp(mas, request, gfp);
5532 mas->mas_flags |= MA_STATE_PREALLOC;
5533 if (likely(!mas_is_err(mas)))
5536 mas_set_alloc_req(mas, 0);
5537 ret = xa_err(mas->node);
5538 mas_reset(mas);
5539 mas_destroy(mas);
5540 mas_reset(mas);
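The preallocation pair above enables a "point of no return" pattern: reserve worst-case nodes while failure is still recoverable, then perform the store where it no longer is. A hedged sketch (per the lines above, mas_store_prealloc() ends by calling mas_destroy() to release unused spares):

	if (mas_preallocate(&mas, entry, GFP_KERNEL))
		return -ENOMEM;			/* still safe to fail here */
	/* ... past the point of no return ... */
	mas_store_prealloc(&mas, entry);	/* cannot fail; frees spares */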
5547 * @mas: The maple state
5553 void mas_destroy(struct ma_state *mas)
5564 if (mas->mas_flags & MA_STATE_REBALANCE) {
5567 mas_start(mas);
5568 mtree_range_walk(mas);
5569 end = mas_data_end(mas) + 1;
5570 if (end < mt_min_slot_count(mas->node) - 1)
5571 mas_destroy_rebalance(mas, end);
5573 mas->mas_flags &= ~MA_STATE_REBALANCE;
5575 mas->mas_flags &= ~(MA_STATE_BULK|MA_STATE_PREALLOC);
5577 total = mas_allocated(mas);
5579 node = mas->alloc;
5580 mas->alloc = node->slot[0];
5591 mas->alloc = NULL;
5597 * @mas: The maple state
5602 * for speed. Please call mas_destroy() on the @mas after inserting the entries
5607 int mas_expected_entries(struct ma_state *mas, unsigned long nr_entries)
5610 struct maple_enode *enode = mas->node;
5625 mas->mas_flags |= MA_STATE_BULK;
5633 if (!mt_is_alloc(mas->tree))
5641 mas_node_count_gfp(mas, nr_nodes + 3, GFP_KERNEL);
5644 mas->mas_flags |= MA_STATE_PREALLOC;
5646 if (!mas_is_err(mas))
5649 ret = xa_err(mas->node);
5650 mas->node = enode;
5651 mas_destroy(mas);
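mas_expected_entries() switches the state into bulk mode and preallocates for the expected number of stores; the doc above requires mas_destroy() afterwards. A hedged usage sketch, where nr, i, keys[] and values[] are hypothetical:

	if (mas_expected_entries(&mas, nr))	/* returns -ENOMEM on failure */
		return -ENOMEM;
	for (i = 0; i < nr; i++) {
		mas_set_range(&mas, keys[i], keys[i]);	/* hypothetical data */
		mas_store(&mas, values[i]);
	}
	mas_destroy(&mas);	/* mandatory after a bulk insert */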
5657 static inline bool mas_next_setup(struct ma_state *mas, unsigned long max,
5660 bool was_none = mas_is_none(mas);
5662 if (unlikely(mas->last >= max)) {
5663 mas->node = MAS_OVERFLOW;
5667 if (mas_is_active(mas))
5670 if (mas_is_none(mas) || mas_is_paused(mas)) {
5671 mas->node = MAS_START;
5672 } else if (mas_is_overflow(mas)) {
5674 mas->node = MAS_START;
5675 } else if (mas_is_underflow(mas)) {
5676 mas->node = MAS_START;
5677 *entry = mas_walk(mas);
5682 if (mas_is_start(mas))
5683 *entry = mas_walk(mas); /* Retries on dead nodes handled by mas_walk */
5685 if (mas_is_ptr(mas)) {
5687 if (was_none && mas->index == 0) {
5688 mas->index = mas->last = 0;
5691 mas->index = 1;
5692 mas->last = ULONG_MAX;
5693 mas->node = MAS_NONE;
5697 if (mas_is_none(mas))
5705 * @mas: The maple state
5708 * Returns the next entry after @mas->index.
5714 void *mas_next(struct ma_state *mas, unsigned long max)
5718 if (mas_next_setup(mas, max, &entry))
5722 return mas_next_slot(mas, max, false, true);
5728 * @mas: The maple state
5731 * Sets @mas->index and @mas->last to the range.
5737 void *mas_next_range(struct ma_state *mas, unsigned long max)
5741 if (mas_next_setup(mas, max, &entry))
5745 return mas_next_slot(mas, max, true, true);
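mas_next() and mas_next_range() drive forward iteration; the mas_for_each() macro from the public header packages the common loop around mas_find(). A sketch under RCU, where nr is a hypothetical counter:

	MA_STATE(mas, mt, 0, 0);
	void *entry;

	rcu_read_lock();
	mas_for_each(&mas, entry, ULONG_MAX)
		nr++;		/* hypothetical per-entry work */
	rcu_read_unlock();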
5764 MA_STATE(mas, mt, index, index);
5767 entry = mas_next(&mas, max);
5773 static inline bool mas_prev_setup(struct ma_state *mas, unsigned long min,
5776 if (unlikely(mas->index <= min)) {
5777 mas->node = MAS_UNDERFLOW;
5781 if (mas_is_active(mas))
5784 if (mas_is_overflow(mas)) {
5785 mas->node = MAS_START;
5786 *entry = mas_walk(mas);
5791 if (mas_is_none(mas) || mas_is_paused(mas)) {
5792 mas->node = MAS_START;
5793 } else if (mas_is_underflow(mas)) {
5795 mas->node = MAS_START;
5798 if (mas_is_start(mas))
5799 mas_walk(mas);
5801 if (unlikely(mas_is_ptr(mas))) {
5802 if (!mas->index)
5804 mas->index = mas->last = 0;
5805 *entry = mas_root(mas);
5809 if (mas_is_none(mas)) {
5810 if (mas->index) {
5812 mas->index = mas->last = 0;
5813 mas->node = MAS_ROOT;
5814 *entry = mas_root(mas);
5823 mas->node = MAS_NONE;
5829 * @mas: The maple state
5833 * Will reset mas to MAS_START if the node is MAS_NONE. Will stop on not
5838 void *mas_prev(struct ma_state *mas, unsigned long min)
5842 if (mas_prev_setup(mas, min, &entry))
5845 return mas_prev_slot(mas, min, false, true);
5851 * @mas: The maple state
5854 * Sets @mas->index and @mas->last to the range.
5856 * Will reset mas to MAS_START if the node is MAS_NONE. Will stop on not
5861 void *mas_prev_range(struct ma_state *mas, unsigned long min)
5865 if (mas_prev_setup(mas, min, &entry))
5868 return mas_prev_slot(mas, min, true, true);
5887 MA_STATE(mas, mt, index, index);
5890 entry = mas_prev(&mas, min);
5898 * @mas: The maple state to pause
5903 * the lock. It resets the @mas to be suitable for the next iteration
5909 void mas_pause(struct ma_state *mas)
5911 mas->node = MAS_PAUSE;
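mas_pause() makes the state safe across a lock drop; the next operation restarts from MAS_PAUSE via the MAS_START handling seen in the setup helpers above. A common sketch for long iterations:

	mas_for_each(&mas, entry, ULONG_MAX) {
		if (need_resched()) {
			mas_pause(&mas);	/* record position, detach */
			rcu_read_unlock();
			cond_resched();
			rcu_read_lock();
		}
		/* ... process entry ... */
	}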
5917 * @mas: The maple state
5923 static inline bool mas_find_setup(struct ma_state *mas, unsigned long max,
5926 if (mas_is_active(mas)) {
5927 if (mas->last < max)
5933 if (mas_is_paused(mas)) {
5934 if (unlikely(mas->last >= max))
5937 mas->index = ++mas->last;
5938 mas->node = MAS_START;
5939 } else if (mas_is_none(mas)) {
5940 if (unlikely(mas->last >= max))
5943 mas->index = mas->last;
5944 mas->node = MAS_START;
5945 } else if (mas_is_overflow(mas) || mas_is_underflow(mas)) {
5946 if (mas->index > max) {
5947 mas->node = MAS_OVERFLOW;
5951 mas->node = MAS_START;
5954 if (mas_is_start(mas)) {
5956 if (mas->index > max)
5959 *entry = mas_walk(mas);
5965 if (unlikely(!mas_searchable(mas))) {
5966 if (unlikely(mas_is_ptr(mas)))
5972 if (mas->index == max)
5978 mas->node = MAS_NONE;
5979 mas->index = 1;
5980 mas->last = ULONG_MAX;
5985 * mas_find() - On the first call, find the entry at or after mas->index up to
5986 * %max. Otherwise, find the entry after mas->index.
5987 * @mas: The maple state
5992 * May set @mas->node to MAS_NONE.
5996 void *mas_find(struct ma_state *mas, unsigned long max)
6000 if (mas_find_setup(mas, max, &entry))
6004 return mas_next_slot(mas, max, false, false);
6010 * mas->index up to %max. Otherwise, advance to the next slot mas->index.
6011 * @mas: The maple state
6016 * May set @mas->node to MAS_NONE.
6020 void *mas_find_range(struct ma_state *mas, unsigned long max)
6024 if (mas_find_setup(mas, max, &entry))
6028 return mas_next_slot(mas, max, true, false);
6034 * @mas: The maple state
6040 static inline bool mas_find_rev_setup(struct ma_state *mas, unsigned long min,
6043 if (mas_is_active(mas)) {
6044 if (mas->index > min)
6050 if (mas_is_paused(mas)) {
6051 if (unlikely(mas->index <= min)) {
6052 mas->node = MAS_NONE;
6055 mas->node = MAS_START;
6056 mas->last = --mas->index;
6057 } else if (mas_is_none(mas)) {
6058 if (mas->index <= min)
6061 mas->last = mas->index;
6062 mas->node = MAS_START;
6063 } else if (mas_is_underflow(mas) || mas_is_overflow(mas)) {
6064 if (mas->last <= min) {
6065 mas->node = MAS_UNDERFLOW;
6069 mas->node = MAS_START;
6072 if (mas_is_start(mas)) {
6074 if (mas->index < min)
6077 *entry = mas_walk(mas);
6082 if (unlikely(!mas_searchable(mas))) {
6083 if (mas_is_ptr(mas))
6086 if (mas_is_none(mas)) {
6091 mas->last = mas->index = 0;
6092 mas->node = MAS_ROOT;
6093 *entry = mas_root(mas);
6098 if (mas->index < min)
6104 mas->node = MAS_NONE;
6110 * mas->index down to %min. Otherwise find the first non-null entry below
6111 * mas->index down to %min.
6112 * @mas: The maple state
6117 * May set @mas->node to MAS_NONE.
6121 void *mas_find_rev(struct ma_state *mas, unsigned long min)
6125 if (mas_find_rev_setup(mas, min, &entry))
6129 return mas_prev_slot(mas, min, false, false);
6136 * below mas->index down to %min. Otherwise advance to the previous slot after
6137 * mas->index down to %min.
6138 * @mas: The maple state
6143 * May set @mas->node to MAS_NONE.
6147 void *mas_find_range_rev(struct ma_state *mas, unsigned long min)
6151 if (mas_find_rev_setup(mas, min, &entry))
6155 return mas_prev_slot(mas, min, true, false);
6162 * @mas: The maple state
6165 * Searches for @mas->index, sets @mas->index and @mas->last to the range and
6168 * Return: the entry that was erased or %NULL; @mas->index and @mas->last are updated.
6170 void *mas_erase(struct ma_state *mas)
6173 MA_WR_STATE(wr_mas, mas, NULL);
6175 if (mas_is_none(mas) || mas_is_paused(mas))
6176 mas->node = MAS_START;
6179 entry = mas_state_walk(mas);
6185 mas_reset(mas);
6188 if (mas_nomem(mas, GFP_KERNEL))
6198 * @mas: The maple state
6202 bool mas_nomem(struct ma_state *mas, gfp_t gfp)
6203 __must_hold(mas->tree->ma_lock)
6205 if (likely(mas->node != MA_ERROR(-ENOMEM))) {
6206 mas_destroy(mas);
6210 if (gfpflags_allow_blocking(gfp) && !mt_external_lock(mas->tree)) {
6211 mtree_unlock(mas->tree);
6212 mas_alloc_nodes(mas, gfp);
6213 mtree_lock(mas->tree);
6215 mas_alloc_nodes(mas, gfp);
6218 if (!mas_allocated(mas))
6221 mas->node = MAS_START;
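mas_nomem() is the retry half of the store loop: if the state holds -ENOMEM, it drops the tree lock when the gfp allows blocking, allocates, and returns true so the caller retries. The idiom, as used by mtree_store_range() just below:

	retry:
		mas_store(&mas, entry);
		if (mas_nomem(&mas, GFP_KERNEL))
			goto retry;
		if (mas_is_err(&mas))
			return xa_err(mas.node);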
6241 MA_STATE(mas, mt, index, index);
6244 trace_ma_read(__func__, &mas);
6247 entry = mas_start(&mas);
6248 if (unlikely(mas_is_none(&mas)))
6251 if (unlikely(mas_is_ptr(&mas))) {
6258 entry = mtree_lookup_walk(&mas);
6259 if (!entry && unlikely(mas_is_start(&mas)))
6284 MA_STATE(mas, mt, index, last);
6285 MA_WR_STATE(wr_mas, &mas, entry);
6287 trace_ma_write(__func__, &mas, 0, entry);
6297 if (mas_nomem(&mas, gfp))
6301 if (mas_is_err(&mas))
6302 return xa_err(mas.node);
6384 MA_STATE(mas, mt, 0, 0);
6393 ret = mas_empty_area(&mas, min, max, size);
6397 mas_insert(&mas, entry);
6402 if (mas_nomem(&mas, gfp))
6405 if (mas_is_err(&mas))
6406 ret = xa_err(mas.node);
6408 *startp = mas.index;
6422 MA_STATE(mas, mt, 0, 0);
6431 ret = mas_empty_area_rev(&mas, min, max, size);
6435 mas_insert(&mas, entry);
6440 if (mas_nomem(&mas, gfp))
6443 if (mas_is_err(&mas))
6444 ret = xa_err(mas.node);
6446 *startp = mas.index;
6468 MA_STATE(mas, mt, index, index);
6469 trace_ma_op(__func__, &mas);
6472 entry = mas_erase(&mas);
6529 MA_STATE(mas, mt, *index, *index);
6535 trace_ma_read(__func__, &mas);
6542 entry = mas_state_walk(&mas);
6543 if (mas_is_start(&mas))
6552 while (mas_searchable(&mas) && (mas.last < max)) {
6553 entry = mas_next_entry(&mas, max);
6563 *index = mas.last + 1;
6636 * @mas: The maple state
6637 * @index: The index to restore in @mas.
6640 * Return: 1 if @mas has been reset to MAS_START, 0 otherwise.
6642 static inline int mas_dead_node(struct ma_state *mas, unsigned long index)
6644 if (unlikely(!mas_searchable(mas) || mas_is_start(mas)))
6647 if (likely(!mte_dead_node(mas->node)))
6650 mas_rewalk(mas, index);
6676 * @mas: The maple state
6681 static inline struct maple_enode *mas_get_slot(struct ma_state *mas,
6684 return mas_slot(mas, ma_slots(mas_mn(mas), mte_node_type(mas->node)),
6689 static void mas_dfs_postorder(struct ma_state *mas, unsigned long max)
6692 struct maple_enode *p = MAS_NONE, *mn = mas->node;
6695 mas_next_node(mas, mas_mn(mas), max);
6696 if (!mas_is_none(mas))
6702 mas->node = mn;
6703 mas_ascend(mas);
6705 p = mas->node;
6706 p_min = mas->min;
6707 p_max = mas->max;
6708 mas_prev_node(mas, 0);
6709 } while (!mas_is_none(mas));
6711 mas->node = p;
6712 mas->max = p_max;
6713 mas->min = p_min;
6923 static void mas_validate_gaps(struct ma_state *mas)
6925 struct maple_enode *mte = mas->node;
6927 enum maple_type mt = mte_node_type(mas->node);
6929 unsigned long p_end, p_start = mas->min;
6937 if (mas_get_slot(mas, i)) {
6950 p_end = mas_safe_pivot(mas, pivots, i, mt);
6953 if (!mas_get_slot(mas, i))
6956 void *entry = mas_get_slot(mas, i);
6959 MT_BUG_ON(mas->tree, !entry);
6963 mas_mn(mas), i, gap, p_end, p_start,
6965 MT_BUG_ON(mas->tree, gap > p_end - p_start + 1);
6973 if (p_end >= mas->max)
6982 MT_BUG_ON(mas->tree, 1);
6988 MT_BUG_ON(mas->tree, 1);
6991 MT_BUG_ON(mas->tree, !gaps);
6996 MT_BUG_ON(mas->tree, 1);
7004 p_slot = mte_parent_slot(mas->node);
7006 MT_BUG_ON(mas->tree, max_gap > mas->max);
7007 if (ma_gaps(p_mn, mas_parent_type(mas, mte))[p_slot] != max_gap) {
7009 mt_dump(mas->tree, mt_dump_hex);
7010 MT_BUG_ON(mas->tree, 1);
7014 static void mas_validate_parent_slot(struct ma_state *mas)
7023 if (mte_is_root(mas->node))
7026 p_slot = mte_parent_slot(mas->node);
7027 p_type = mas_parent_type(mas, mas->node);
7028 parent = mte_parent(mas->node);
7030 MT_BUG_ON(mas->tree, mas_mn(mas) == parent);
7035 node = mas_slot(mas, slots, i);
7037 if (node != mas->node)
7039 parent, i, mas_mn(mas));
7040 MT_BUG_ON(mas->tree, node != mas->node);
7041 } else if (node == mas->node) {
7043 mas_mn(mas), parent, i, p_slot);
7044 MT_BUG_ON(mas->tree, node == mas->node);
7049 static void mas_validate_child_slot(struct ma_state *mas)
7051 enum maple_type type = mte_node_type(mas->node);
7052 void __rcu **slots = ma_slots(mte_to_node(mas->node), type);
7053 unsigned long *pivots = ma_pivots(mte_to_node(mas->node), type);
7057 if (mte_is_leaf(mas->node))
7061 child = mas_slot(mas, slots, i);
7065 mas_mn(mas), i);
7066 MT_BUG_ON(mas->tree, 1);
7071 mas_mn(mas), i, mte_to_node(child),
7073 MT_BUG_ON(mas->tree, 1);
7076 if (mte_parent(child) != mte_to_node(mas->node)) {
7079 mte_to_node(mas->node));
7080 MT_BUG_ON(mas->tree, 1);
7083 if (i < mt_pivots[type] && pivots[i] == mas->max)
7089 * Validate all pivots are within mas->min and mas->max, check metadata ends
7093 static void mas_validate_limits(struct ma_state *mas)
7097 enum maple_type type = mte_node_type(mas->node);
7098 void __rcu **slots = ma_slots(mte_to_node(mas->node), type);
7099 unsigned long *pivots = ma_pivots(mas_mn(mas), type);
7104 piv = mas_safe_pivot(mas, pivots, i, type);
7108 mas_mn(mas), i);
7109 MAS_WARN_ON(mas, 1);
7114 mas_mn(mas), i, piv, prev_piv);
7115 MAS_WARN_ON(mas, piv < prev_piv);
7118 if (piv < mas->min) {
7119 pr_err("%p[%u] %lu < %lu\n", mas_mn(mas), i,
7120 piv, mas->min);
7121 MAS_WARN_ON(mas, piv < mas->min);
7123 if (piv > mas->max) {
7124 pr_err("%p[%u] %lu > %lu\n", mas_mn(mas), i,
7125 piv, mas->max);
7126 MAS_WARN_ON(mas, piv > mas->max);
7129 if (piv == mas->max)
7133 if (mas_data_end(mas) != i) {
7135 mas_mn(mas), mas_data_end(mas), i);
7136 MT_BUG_ON(mas->tree, 1);
7140 void *entry = mas_slot(mas, slots, i);
7143 pr_err("%p[%u] should not have entry %p\n", mas_mn(mas),
7145 MT_BUG_ON(mas->tree, entry != NULL);
7155 mas_mn(mas), i, piv);
7156 MAS_WARN_ON(mas, i < mt_pivots[type] - 1);
7166 MA_STATE(mas, mt, 0, 0);
7168 mas_start(&mas);
7169 if (mas_is_none(&mas) || (mas.node == MAS_ROOT))
7172 while (!mte_is_leaf(mas.node))
7173 mas_descend(&mas);
7175 slots = ma_slots(mte_to_node(mas.node), mte_node_type(mas.node));
7177 entry = mas_slot(&mas, slots, offset);
7180 mas_mn(&mas), offset);
7184 if (offset == mas_data_end(&mas)) {
7185 mas_next_node(&mas, mas_mn(&mas), ULONG_MAX);
7186 if (mas_is_none(&mas))
7189 slots = ma_slots(mte_to_node(mas.node),
7190 mte_node_type(mas.node));
7195 } while (!mas_is_none(&mas));
7200 * 1. The limits (pivots are within mas->min to mas->max)
7207 MA_STATE(mas, mt, 0, 0);
7209 mas_start(&mas);
7210 if (!mas_searchable(&mas))
7213 while (!mte_is_leaf(mas.node))
7214 mas_descend(&mas);
7216 while (!mas_is_none(&mas)) {
7217 MAS_WARN_ON(&mas, mte_dead_node(mas.node));
7218 end = mas_data_end(&mas);
7219 if (MAS_WARN_ON(&mas, (end < mt_min_slot_count(mas.node)) &&
7220 (mas.max != ULONG_MAX))) {
7221 pr_err("Invalid size %u of %p\n", end, mas_mn(&mas));
7224 mas_validate_parent_slot(&mas);
7225 mas_validate_limits(&mas);
7226 mas_validate_child_slot(&mas);
7228 mas_validate_gaps(&mas);
7229 mas_dfs_postorder(&mas, ULONG_MAX);
7238 void mas_dump(const struct ma_state *mas)
7240 pr_err("MAS: tree=%p enode=%p ", mas->tree, mas->node);
7241 if (mas_is_none(mas))
7243 else if (mas_is_ptr(mas))
7245 else if (mas_is_start(mas))
7247 else if (mas_is_paused(mas))
7250 pr_err("[%u] index=%lx last=%lx\n", mas->offset, mas->index, mas->last);
7252 mas->min, mas->max, mas->alloc, mas->depth, mas->mas_flags);
7253 if (mas->index > mas->last)