Lines matching defs:xas in lib/xarray.c
19 * @xas is the 'xarray operation state'. It may be either a pointer to
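
These are the definition and use sites of the XArray operation state in lib/xarray.c. In client code the state is normally declared on the stack with XA_STATE(). A minimal lookup sketch, assuming a caller-owned struct xarray; the helper name peek is illustrative:

    #include <linux/xarray.h>

    /* Hypothetical helper: look up one entry without taking the xa_lock. */
    static void *peek(struct xarray *xa, unsigned long index)
    {
            XA_STATE(xas, xa, index);       /* on-stack operation state */
            void *entry;

            rcu_read_lock();
            do {
                    entry = xas_load(&xas);         /* walk to @index */
            } while (xas_retry(&xas, entry));       /* skip transient retry entries */
            rcu_read_unlock();

            return entry;
    }

This is the same shape as xa_load() itself (lines 1454-1462 below).
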
36 static inline void xas_lock_type(struct xa_state *xas, unsigned int lock_type)
39 xas_lock_irq(xas);
41 xas_lock_bh(xas);
43 xas_lock(xas);
46 static inline void xas_unlock_type(struct xa_state *xas, unsigned int lock_type)
49 xas_unlock_irq(xas);
51 xas_unlock_bh(xas);
53 xas_unlock(xas);
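
Only the lines mentioning xas survive the match filter; in full, these two helpers simply dispatch on the array's declared lock type, roughly:

    static inline void xas_lock_type(struct xa_state *xas, unsigned int lock_type)
    {
            if (lock_type == XA_LOCK_IRQ)           /* array used from interrupt context */
                    xas_lock_irq(xas);
            else if (lock_type == XA_LOCK_BH)       /* array used from softirq context */
                    xas_lock_bh(xas);
            else
                    xas_lock(xas);
    }

xas_unlock_type() mirrors this with the corresponding unlock calls.
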
119 * @xas: Array operation state.
124 static void xas_squash_marks(const struct xa_state *xas)
127 unsigned int limit = xas->xa_offset + xas->xa_sibs + 1;
129 if (!xas->xa_sibs)
133 unsigned long *marks = xas->xa_node->marks[mark];
134 if (find_next_bit(marks, limit, xas->xa_offset + 1) == limit)
136 __set_bit(xas->xa_offset, marks);
137 bitmap_clear(marks, xas->xa_offset + 1, xas->xa_sibs);
147 static void xas_set_offset(struct xa_state *xas)
149 xas->xa_offset = get_offset(xas->xa_index, xas->xa_node);
153 static void xas_move_index(struct xa_state *xas, unsigned long offset)
155 unsigned int shift = xas->xa_node->shift;
156 xas->xa_index &= ~XA_CHUNK_MASK << shift;
157 xas->xa_index += offset << shift;
160 static void xas_advance(struct xa_state *xas)
162 xas->xa_offset++;
163 xas_move_index(xas, xas->xa_offset);
166 static void *set_bounds(struct xa_state *xas)
168 xas->xa_node = XAS_BOUNDS;
173 * Starts a walk. If the @xas is already valid, we assume that it's on
176 * of the xarray, return NULL without changing @xas->xa_node. Otherwise
177 * set @xas->xa_node to NULL and return the current head of the array.
179 static void *xas_start(struct xa_state *xas)
183 if (xas_valid(xas))
184 return xas_reload(xas);
185 if (xas_error(xas))
188 entry = xa_head(xas->xa);
190 if (xas->xa_index)
191 return set_bounds(xas);
193 if ((xas->xa_index >> xa_to_node(entry)->shift) > XA_CHUNK_MASK)
194 return set_bounds(xas);
197 xas->xa_node = NULL;
201 static void *xas_descend(struct xa_state *xas, struct xa_node *node)
203 unsigned int offset = get_offset(xas->xa_index, node);
204 void *entry = xa_entry(xas->xa, node, offset);
206 xas->xa_node = node;
209 entry = xa_entry(xas->xa, node, offset);
212 xas->xa_offset = offset;
218 * @xas: XArray operation state.
220 * Usually walks the @xas to the appropriate state to load the entry
222 * @xas is in an error state. xas_load() will never expand the tree.
226 * present within the range specified by @xas.
231 void *xas_load(struct xa_state *xas)
233 void *entry = xas_start(xas);
238 if (xas->xa_shift > node->shift)
240 entry = xas_descend(xas, node);
263 * @xas: XArray operation state.
267 static void xas_destroy(struct xa_state *xas)
269 struct xa_node *next, *node = xas->xa_alloc;
275 xas->xa_alloc = node = next;
281 * @xas: XArray operation state.
286 * If it fails, @xas is flagged as needing memory to continue. The caller
297 bool xas_nomem(struct xa_state *xas, gfp_t gfp)
299 if (xas->xa_node != XA_ERROR(-ENOMEM)) {
300 xas_destroy(xas);
303 if (xas->xa->xa_flags & XA_FLAGS_ACCOUNT)
305 xas->xa_alloc = kmem_cache_alloc(radix_tree_node_cachep, gfp);
306 if (!xas->xa_alloc)
308 xas->xa_alloc->parent = NULL;
309 XA_NODE_BUG_ON(xas->xa_alloc, !list_empty(&xas->xa_alloc->private_list));
310 xas->xa_node = XAS_RESTART;
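
The calling convention hinted at by "flagged as needing memory to continue": perform the operation under the lock, drop the lock, and let xas_nomem() allocate and request a retry. A sketch, assuming the caller may sleep for GFP_KERNEL; store_one is an illustrative name:

    static int store_one(struct xarray *xa, unsigned long index, void *entry)
    {
            XA_STATE(xas, xa, index);

            do {
                    xas_lock(&xas);
                    xas_store(&xas, entry);         /* may leave -ENOMEM in @xas */
                    xas_unlock(&xas);
            } while (xas_nomem(&xas, GFP_KERNEL));  /* allocates and asks us to retry */

            return xas_error(&xas);                 /* 0 on success, else negative errno */
    }

__xas_nomem() below is the variant for callers that must re-take the same lock type around the allocation.
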
317 * @xas: XArray operation state.
324 static bool __xas_nomem(struct xa_state *xas, gfp_t gfp)
325 __must_hold(xas->xa->xa_lock)
327 unsigned int lock_type = xa_lock_type(xas->xa);
329 if (xas->xa_node != XA_ERROR(-ENOMEM)) {
330 xas_destroy(xas);
333 if (xas->xa->xa_flags & XA_FLAGS_ACCOUNT)
336 xas_unlock_type(xas, lock_type);
337 xas->xa_alloc = kmem_cache_alloc(radix_tree_node_cachep, gfp);
338 xas_lock_type(xas, lock_type);
340 xas->xa_alloc = kmem_cache_alloc(radix_tree_node_cachep, gfp);
342 if (!xas->xa_alloc)
344 xas->xa_alloc->parent = NULL;
345 XA_NODE_BUG_ON(xas->xa_alloc, !list_empty(&xas->xa_alloc->private_list));
346 xas->xa_node = XAS_RESTART;
350 static void xas_update(struct xa_state *xas, struct xa_node *node)
352 if (xas->xa_update)
353 xas->xa_update(node);
358 static void *xas_alloc(struct xa_state *xas, unsigned int shift)
360 struct xa_node *parent = xas->xa_node;
361 struct xa_node *node = xas->xa_alloc;
363 if (xas_invalid(xas))
367 xas->xa_alloc = NULL;
371 if (xas->xa->xa_flags & XA_FLAGS_ACCOUNT)
376 xas_set_err(xas, -ENOMEM);
382 node->offset = xas->xa_offset;
385 xas_update(xas, parent);
392 RCU_INIT_POINTER(node->parent, xas->xa_node);
393 node->array = xas->xa;
400 static unsigned long xas_size(const struct xa_state *xas)
402 return (xas->xa_sibs + 1UL) << xas->xa_shift;
408 * in order to add the entry described by @xas. Because we cannot store a
412 static unsigned long xas_max(struct xa_state *xas)
414 unsigned long max = xas->xa_index;
417 if (xas->xa_shift || xas->xa_sibs) {
418 unsigned long mask = xas_size(xas) - 1;
436 static void xas_shrink(struct xa_state *xas)
438 struct xarray *xa = xas->xa;
439 struct xa_node *node = xas->xa_node;
454 xas->xa_node = XAS_BOUNDS;
464 xas_update(xas, node);
475 * @xas: Array operation state.
477 * Attempts to delete the @xas->xa_node. This will fail if xa->node has
480 static void xas_delete_node(struct xa_state *xas)
482 struct xa_node *node = xas->xa_node;
491 parent = xa_parent_locked(xas->xa, node);
492 xas->xa_node = parent;
493 xas->xa_offset = node->offset;
497 xas->xa->xa_head = NULL;
498 xas->xa_node = XAS_BOUNDS;
502 parent->slots[xas->xa_offset] = NULL;
506 xas_update(xas, node);
510 xas_shrink(xas);
515 * @xas: Array operation state.
522 static void xas_free_nodes(struct xa_state *xas, struct xa_node *top)
528 void *entry = xa_entry_locked(xas->xa, node, offset);
541 parent = xa_parent_locked(xas->xa, node);
545 xas_update(xas, node);
556 * sufficient height to be able to contain @xas->xa_index
558 static int xas_expand(struct xa_state *xas, void *head)
560 struct xarray *xa = xas->xa;
563 unsigned long max = xas_max(xas);
575 xas->xa_node = NULL;
581 node = xas_alloc(xas, shift);
616 xas_update(xas, node);
621 xas->xa_node = node;
627 * @xas: XArray operation state.
636 * slot, returns %NULL and indicates the error in @xas.
638 static void *xas_create(struct xa_state *xas, bool allow_root)
640 struct xarray *xa = xas->xa;
643 struct xa_node *node = xas->xa_node;
645 unsigned int order = xas->xa_shift;
649 xas->xa_node = NULL;
652 shift = xas_expand(xas, entry);
659 } else if (xas_error(xas)) {
662 unsigned int offset = xas->xa_offset;
676 node = xas_alloc(xas, shift);
687 entry = xas_descend(xas, node);
688 slot = &node->slots[xas->xa_offset];
696 * @xas: XArray operation state.
698 * Creates all of the slots in the range covered by @xas. Sets @xas to
703 void xas_create_range(struct xa_state *xas)
705 unsigned long index = xas->xa_index;
706 unsigned char shift = xas->xa_shift;
707 unsigned char sibs = xas->xa_sibs;
709 xas->xa_index |= ((sibs + 1UL) << shift) - 1;
710 if (xas_is_node(xas) && xas->xa_node->shift == xas->xa_shift)
711 xas->xa_offset |= sibs;
712 xas->xa_shift = 0;
713 xas->xa_sibs = 0;
716 xas_create(xas, true);
717 if (xas_error(xas))
719 if (xas->xa_index <= (index | XA_CHUNK_MASK))
721 xas->xa_index -= XA_CHUNK_SIZE;
724 struct xa_node *node = xas->xa_node;
727 xas->xa_node = xa_parent_locked(xas->xa, node);
728 xas->xa_offset = node->offset - 1;
735 xas->xa_shift = shift;
736 xas->xa_sibs = sibs;
737 xas->xa_index = index;
740 xas->xa_index = index;
741 if (xas->xa_node)
742 xas_set_offset(xas);
746 static void update_node(struct xa_state *xas, struct xa_node *node,
756 xas_update(xas, node);
758 xas_delete_node(xas);
763 * @xas: XArray operation state.
766 * If @xas is operating on a multi-index entry, the entry returned by this
774 void *xas_store(struct xa_state *xas, void *entry)
777 void __rcu **slot = &xas->xa->xa_head;
786 first = xas_create(xas, allow_root);
788 first = xas_load(xas);
791 if (xas_invalid(xas))
793 node = xas->xa_node;
794 if (node && (xas->xa_shift < node->shift))
795 xas->xa_sibs = 0;
796 if ((first == entry) && !xas->xa_sibs)
800 offset = xas->xa_offset;
801 max = xas->xa_offset + xas->xa_sibs;
804 if (xas->xa_sibs)
805 xas_squash_marks(xas);
808 xas_init_marks(xas);
820 xas_free_nodes(xas, xa_to_node(next));
829 entry = xa_mk_sibling(xas->xa_offset);
834 next = xa_entry_locked(xas->xa, node, ++offset);
843 update_node(xas, node, count, values);
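
For a multi-index store (one entry covering a power-of-two range of indices, available with CONFIG_XARRAY_MULTI), the state is initialised with XA_STATE_ORDER() and the same store/retry protocol applies. A sketch; store_order is an illustrative name:

    static int store_order(struct xarray *xa, unsigned long index,
                           unsigned int order, void *entry)
    {
            XA_STATE_ORDER(xas, xa, index, order);  /* covers 2^order slots */

            do {
                    xas_lock(&xas);
                    xas_store(&xas, entry);
                    xas_unlock(&xas);
            } while (xas_nomem(&xas, GFP_KERNEL));

            return xas_error(&xas);
    }
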
850 * @xas: XArray operation state.
853 * Return: true if the mark is set, false if the mark is clear or @xas
856 bool xas_get_mark(const struct xa_state *xas, xa_mark_t mark)
858 if (xas_invalid(xas))
860 if (!xas->xa_node)
861 return xa_marked(xas->xa, mark);
862 return node_get_mark(xas->xa_node, xas->xa_offset, mark);
868 * @xas: XArray operation state.
872 * on all the ancestor entries. Does nothing if @xas has not been walked to
875 void xas_set_mark(const struct xa_state *xas, xa_mark_t mark)
877 struct xa_node *node = xas->xa_node;
878 unsigned int offset = xas->xa_offset;
880 if (xas_invalid(xas))
887 node = xa_parent_locked(xas->xa, node);
890 if (!xa_marked(xas->xa, mark))
891 xa_mark_set(xas->xa, mark);
897 * @xas: XArray operation state.
902 * @xas has not been walked to an entry, or is in an error state.
904 void xas_clear_mark(const struct xa_state *xas, xa_mark_t mark)
906 struct xa_node *node = xas->xa_node;
907 unsigned int offset = xas->xa_offset;
909 if (xas_invalid(xas))
919 node = xa_parent_locked(xas->xa, node);
922 if (xa_marked(xas->xa, mark))
923 xa_mark_clear(xas->xa, mark);
929 * @xas: Array operations state.
931 * Initialise all marks for the entry specified by @xas. If we're tracking
938 void xas_init_marks(const struct xa_state *xas)
943 if (xa_track_free(xas->xa) && mark == XA_FREE_MARK)
944 xas_set_mark(xas, mark);
946 xas_clear_mark(xas, mark);
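
Setting or clearing a mark requires the xa_lock, since the mark must be propagated through the ancestor nodes (see xas_set_mark() above). A sketch using the predefined XA_MARK_0; tag_if_present is an illustrative name:

    static void tag_if_present(struct xarray *xa, unsigned long index)
    {
            XA_STATE(xas, xa, index);

            xas_lock(&xas);
            if (xas_load(&xas))                     /* only mark occupied slots */
                    xas_set_mark(&xas, XA_MARK_0);  /* propagates to the root */
            xas_unlock(&xas);
    }

xas_get_mark() only reads the bitmaps, so it may be called under either the lock or an RCU read-side critical section.
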
990 * @xas: XArray operation state.
998 * entries of the order stored in the @xas.
1002 void xas_split_alloc(struct xa_state *xas, void *entry, unsigned int order,
1006 unsigned int mask = xas->xa_sibs;
1009 if (WARN_ON(xas->xa_shift + 2 * XA_CHUNK_SHIFT < order))
1011 if (xas->xa_shift + XA_CHUNK_SHIFT > order)
1022 node->array = xas->xa;
1031 RCU_INIT_POINTER(node->parent, xas->xa_alloc);
1032 xas->xa_alloc = node;
1037 xas_destroy(xas);
1038 xas_set_err(xas, -ENOMEM);
1044 * @xas: XArray operation state.
1052 void xas_split(struct xa_state *xas, void *entry, unsigned int order)
1057 void *curr = xas_load(xas);
1060 node = xas->xa_node;
1064 marks = node_get_marks(node, xas->xa_offset);
1066 offset = xas->xa_offset + sibs;
1068 if (xas->xa_shift < node->shift) {
1069 struct xa_node *child = xas->xa_alloc;
1071 xas->xa_alloc = rcu_dereference_raw(child->parent);
1083 xas_update(xas, child);
1085 unsigned int canon = offset - xas->xa_sibs;
1093 (xas->xa_sibs + 1);
1095 } while (offset-- > xas->xa_offset);
1098 xas_update(xas, node);
1105 * @xas: XArray operation state.
1110 * the lock. It resets the @xas to be suitable for the next iteration
1118 void xas_pause(struct xa_state *xas)
1120 struct xa_node *node = xas->xa_node;
1122 if (xas_invalid(xas))
1125 xas->xa_node = XAS_RESTART;
1127 unsigned long offset = xas->xa_offset;
1129 if (!xa_is_sibling(xa_entry(xas->xa, node, offset)))
1132 xas->xa_index += (offset - xas->xa_offset) << node->shift;
1133 if (xas->xa_index == 0)
1134 xas->xa_node = XAS_BOUNDS;
1136 xas->xa_index++;
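
xas_pause() exists so that a long walk can drop the lock at a safe point and later resume where it left off. The usual shape, with the per-entry processing elided:

    XA_STATE(xas, xa, 0);
    void *entry;

    rcu_read_lock();
    xas_for_each(&xas, entry, ULONG_MAX) {
            if (xas_retry(&xas, entry))
                    continue;
            /* ... process entry ... */
            if (need_resched()) {
                    xas_pause(&xas);        /* make the state safe to resume */
                    rcu_read_unlock();
                    cond_resched();
                    rcu_read_lock();
            }
    }
    rcu_read_unlock();
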
1143 * @xas: XArray operation state.
1148 void *__xas_prev(struct xa_state *xas)
1152 if (!xas_frozen(xas->xa_node))
1153 xas->xa_index--;
1154 if (!xas->xa_node)
1155 return set_bounds(xas);
1156 if (xas_not_node(xas->xa_node))
1157 return xas_load(xas);
1159 if (xas->xa_offset != get_offset(xas->xa_index, xas->xa_node))
1160 xas->xa_offset--;
1162 while (xas->xa_offset == 255) {
1163 xas->xa_offset = xas->xa_node->offset - 1;
1164 xas->xa_node = xa_parent(xas->xa, xas->xa_node);
1165 if (!xas->xa_node)
1166 return set_bounds(xas);
1170 entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset);
1174 xas->xa_node = xa_to_node(entry);
1175 xas_set_offset(xas);
1182 * @xas: XArray operation state.
1187 void *__xas_next(struct xa_state *xas)
1191 if (!xas_frozen(xas->xa_node))
1192 xas->xa_index++;
1193 if (!xas->xa_node)
1194 return set_bounds(xas);
1195 if (xas_not_node(xas->xa_node))
1196 return xas_load(xas);
1198 if (xas->xa_offset != get_offset(xas->xa_index, xas->xa_node))
1199 xas->xa_offset++;
1201 while (xas->xa_offset == XA_CHUNK_SIZE) {
1202 xas->xa_offset = xas->xa_node->offset + 1;
1203 xas->xa_node = xa_parent(xas->xa, xas->xa_node);
1204 if (!xas->xa_node)
1205 return set_bounds(xas);
1209 entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset);
1213 xas->xa_node = xa_to_node(entry);
1214 xas_set_offset(xas);
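
xas_prev()/xas_next(), the inline fast paths that fall back to the two functions above, move the cursor by exactly one index, which is cheaper than a fresh walk when scanning dense ranges. A sketch inspecting a small window; xa and start are assumed to be in scope and the bounds are arbitrary:

    XA_STATE(xas, xa, start);
    void *entry;

    rcu_read_lock();
    entry = xas_load(&xas);                 /* position the cursor at @start */
    while (entry && xas.xa_index < start + 16) {
            /* ... examine entry ... */
            entry = xas_next(&xas);         /* usually a single array step */
    }
    rcu_read_unlock();

Note this loop stops at the first empty slot, since xas_next() returns the entry (possibly NULL) at the next index.
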
1221 * @xas: XArray operation state.
1224 * If the @xas has not yet been walked to an entry, return the entry
1225 * which has an index >= xas.xa_index. If it has been walked, the entry
1230 * is set to the smallest index not yet in the array. This allows @xas
1235 void *xas_find(struct xa_state *xas, unsigned long max)
1239 if (xas_error(xas) || xas->xa_node == XAS_BOUNDS)
1241 if (xas->xa_index > max)
1242 return set_bounds(xas);
1244 if (!xas->xa_node) {
1245 xas->xa_index = 1;
1246 return set_bounds(xas);
1247 } else if (xas->xa_node == XAS_RESTART) {
1248 entry = xas_load(xas);
1249 if (entry || xas_not_node(xas->xa_node))
1251 } else if (!xas->xa_node->shift &&
1252 xas->xa_offset != (xas->xa_index & XA_CHUNK_MASK)) {
1253 xas->xa_offset = ((xas->xa_index - 1) & XA_CHUNK_MASK) + 1;
1256 xas_advance(xas);
1258 while (xas->xa_node && (xas->xa_index <= max)) {
1259 if (unlikely(xas->xa_offset == XA_CHUNK_SIZE)) {
1260 xas->xa_offset = xas->xa_node->offset + 1;
1261 xas->xa_node = xa_parent(xas->xa, xas->xa_node);
1265 entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset);
1267 xas->xa_node = xa_to_node(entry);
1268 xas->xa_offset = 0;
1274 xas_advance(xas);
1277 if (!xas->xa_node)
1278 xas->xa_node = XAS_BOUNDS;
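
xas_find() is the building block behind xas_for_each() and xa_find(). Direct use looks like the following; as the comment above notes, on failure the state is left ready for an immediate xas_store(). first_present is an illustrative name:

    static void *first_present(struct xarray *xa, unsigned long start)
    {
            XA_STATE(xas, xa, start);
            void *entry;

            rcu_read_lock();
            do {
                    entry = xas_find(&xas, ULONG_MAX);  /* NULL if none <= max */
            } while (xas_retry(&xas, entry));
            rcu_read_unlock();

            return entry;
    }
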
1285 * @xas: XArray operation state.
1289 * If the @xas has not yet been walked to an entry, return the marked entry
1290 * which has an index >= xas.xa_index. If it has been walked, the entry
1292 * first marked entry with an index > xas.xa_index.
1294 * If no marked entry is found and the array is smaller than @max, @xas is
1295 * set to the bounds state and xas->xa_index is set to the smallest index
1296 * not yet in the array. This allows @xas to be immediately passed to
1299 * If no entry is found before @max is reached, @xas is set to the restart
1304 void *xas_find_marked(struct xa_state *xas, unsigned long max, xa_mark_t mark)
1310 if (xas_error(xas))
1312 if (xas->xa_index > max)
1315 if (!xas->xa_node) {
1316 xas->xa_index = 1;
1318 } else if (xas_top(xas->xa_node)) {
1320 entry = xa_head(xas->xa);
1321 xas->xa_node = NULL;
1322 if (xas->xa_index > max_index(entry))
1325 if (xa_marked(xas->xa, mark))
1327 xas->xa_index = 1;
1330 xas->xa_node = xa_to_node(entry);
1331 xas->xa_offset = xas->xa_index >> xas->xa_node->shift;
1334 while (xas->xa_index <= max) {
1335 if (unlikely(xas->xa_offset == XA_CHUNK_SIZE)) {
1336 xas->xa_offset = xas->xa_node->offset + 1;
1337 xas->xa_node = xa_parent(xas->xa, xas->xa_node);
1338 if (!xas->xa_node)
1345 entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset);
1347 xas->xa_offset = xa_to_sibling(entry);
1348 xas_move_index(xas, xas->xa_offset);
1352 offset = xas_find_chunk(xas, advance, mark);
1353 if (offset > xas->xa_offset) {
1355 xas_move_index(xas, offset);
1357 if ((xas->xa_index - 1) >= max)
1359 xas->xa_offset = offset;
1364 entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset);
1365 if (!entry && !(xa_track_free(xas->xa) && mark == XA_FREE_MARK))
1369 xas->xa_node = xa_to_node(entry);
1370 xas_set_offset(xas);
1374 if (xas->xa_index > max)
1376 return set_bounds(xas);
1378 xas->xa_node = XAS_RESTART;
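
The marked variant drives xas_for_each_marked() (used by xas_extract_marked() further down). A sketch restricted to entries tagged XA_MARK_1:

    XA_STATE(xas, xa, 0);
    void *entry;

    rcu_read_lock();
    xas_for_each_marked(&xas, entry, ULONG_MAX, XA_MARK_1) {
            if (xas_retry(&xas, entry))
                    continue;
            /* ... only marked entries are visited ... */
    }
    rcu_read_unlock();
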
1385 * @xas: XArray operation state.
1387 * The @xas describes both a range and a position within that range.
1390 * Return: The next entry in the range covered by @xas or %NULL.
1392 void *xas_find_conflict(struct xa_state *xas)
1396 if (xas_error(xas))
1399 if (!xas->xa_node)
1402 if (xas_top(xas->xa_node)) {
1403 curr = xas_start(xas);
1408 curr = xas_descend(xas, node);
1414 if (xas->xa_node->shift > xas->xa_shift)
1418 if (xas->xa_node->shift == xas->xa_shift) {
1419 if ((xas->xa_offset & xas->xa_sibs) == xas->xa_sibs)
1421 } else if (xas->xa_offset == XA_CHUNK_MASK) {
1422 xas->xa_offset = xas->xa_node->offset;
1423 xas->xa_node = xa_parent_locked(xas->xa, xas->xa_node);
1424 if (!xas->xa_node)
1428 curr = xa_entry_locked(xas->xa, xas->xa_node, ++xas->xa_offset);
1432 xas->xa_node = xa_to_node(curr);
1433 xas->xa_offset = 0;
1434 curr = xa_entry_locked(xas->xa, xas->xa_node, 0);
1439 xas->xa_offset -= xas->xa_sibs;
1454 XA_STATE(xas, xa, index);
1459 entry = xas_load(&xas);
1462 } while (xas_retry(&xas, entry));
1469 static void *xas_result(struct xa_state *xas, void *curr)
1473 if (xas_error(xas))
1474 curr = xas->xa_node;
1492 XA_STATE(xas, xa, index);
1493 return xas_result(&xas, xas_store(&xas, NULL));
1538 XA_STATE(xas, xa, index);
1547 curr = xas_store(&xas, entry);
1549 xas_clear_mark(&xas, XA_FREE_MARK);
1550 } while (__xas_nomem(&xas, gfp));
1552 return xas_result(&xas, curr);
1604 XA_STATE(xas, xa, index);
1611 curr = xas_load(&xas);
1613 xas_store(&xas, entry);
1615 xas_clear_mark(&xas, XA_FREE_MARK);
1617 } while (__xas_nomem(&xas, gfp));
1619 return xas_result(&xas, curr);
1641 XA_STATE(xas, xa, index);
1650 curr = xas_load(&xas);
1652 xas_store(&xas, entry);
1654 xas_clear_mark(&xas, XA_FREE_MARK);
1656 xas_set_err(&xas, -EBUSY);
1658 } while (__xas_nomem(&xas, gfp));
1660 return xas_error(&xas);
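
The -EBUSY at line 1656 is what distinguishes insertion from an ordinary store: xa_insert() refuses to overwrite an occupied slot. From the caller's side:

    int err = xa_insert(xa, index, entry, GFP_KERNEL);

    switch (err) {
    case 0:             /* stored into a previously empty slot */
            break;
    case -EBUSY:        /* index already occupied; nothing was changed */
            break;
    case -ENOMEM:       /* could not allocate interior nodes */
            break;
    }
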
1665 static void xas_set_range(struct xa_state *xas, unsigned long first,
1672 xas_set(xas, first);
1692 xas->xa_shift = shift;
1693 xas->xa_sibs = sibs;
1717 XA_STATE(xas, xa, 0);
1725 xas_lock(&xas);
1730 xas_set_order(&xas, last, order);
1731 xas_create(&xas, true);
1732 if (xas_error(&xas))
1736 xas_set_range(&xas, first, last);
1737 xas_store(&xas, entry);
1738 if (xas_error(&xas))
1740 first += xas_size(&xas);
1743 xas_unlock(&xas);
1744 } while (xas_nomem(&xas, gfp));
1746 return xas_result(&xas, NULL);
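
xa_store_range() makes every index in [first, last] return the same entry and is only built with CONFIG_XARRAY_MULTI. Errors come back as an error entry rather than an errno, hence the xa_is_err() idiom; the range 64-127 here is arbitrary:

    void *ret = xa_store_range(xa, 64, 127, entry, GFP_KERNEL);

    if (xa_is_err(ret))
            return xa_err(ret);     /* convert the error entry to a negative errno */
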
1759 XA_STATE(xas, xa, index);
1764 entry = xas_load(&xas);
1769 if (!xas.xa_node)
1773 unsigned int slot = xas.xa_offset + (1 << order);
1777 if (!xa_is_sibling(xas.xa_node->slots[slot]))
1782 order += xas.xa_node->shift;
1811 XA_STATE(xas, xa, 0);
1822 xas.xa_index = limit.min;
1823 xas_find_marked(&xas, limit.max, XA_FREE_MARK);
1824 if (xas.xa_node == XAS_RESTART)
1825 xas_set_err(&xas, -EBUSY);
1827 *id = xas.xa_index;
1828 xas_store(&xas, entry);
1829 xas_clear_mark(&xas, XA_FREE_MARK);
1830 } while (__xas_nomem(&xas, gfp));
1832 return xas_error(&xas);
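
__xa_alloc() is the engine behind xa_alloc(): it searches for XA_FREE_MARK, stores the entry, and clears the mark so the slot is no longer advertised as free. Typical client code, assuming IDs from 0 upwards are acceptable; ids and new_id are illustrative names:

    DEFINE_XARRAY_ALLOC(ids);       /* allocating array; tracks free slots */

    static int new_id(void *item)
    {
            u32 id;
            int err = xa_alloc(&ids, &id, item, xa_limit_31b, GFP_KERNEL);

            if (err)
                    return err;     /* -EBUSY if the limit is exhausted */
            return id;              /* xa_limit_31b keeps this within an int */
    }
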
1898 XA_STATE(xas, xa, index);
1899 void *entry = xas_load(&xas);
1902 xas_set_mark(&xas, mark);
1916 XA_STATE(xas, xa, index);
1917 void *entry = xas_load(&xas);
1920 xas_clear_mark(&xas, mark);
1938 XA_STATE(xas, xa, index);
1942 entry = xas_start(&xas);
1943 while (xas_get_mark(&xas, mark)) {
1946 entry = xas_descend(&xas, xa_to_node(entry));
2012 XA_STATE(xas, xa, *indexp);
2018 entry = xas_find_marked(&xas, max, filter);
2020 entry = xas_find(&xas, max);
2021 } while (xas_retry(&xas, entry));
2025 *indexp = xas.xa_index;
2030 static bool xas_sibling(struct xa_state *xas)
2032 struct xa_node *node = xas->xa_node;
2038 return (xas->xa_index & mask) >
2039 ((unsigned long)xas->xa_offset << node->shift);
2062 XA_STATE(xas, xa, *indexp + 1);
2065 if (xas.xa_index == 0)
2071 entry = xas_find_marked(&xas, max, filter);
2073 entry = xas_find(&xas, max);
2075 if (xas_invalid(&xas))
2077 if (xas_sibling(&xas))
2079 if (!xas_retry(&xas, entry))
2085 *indexp = xas.xa_index;
2090 static unsigned int xas_extract_present(struct xa_state *xas, void **dst,
2097 xas_for_each(xas, entry, max) {
2098 if (xas_retry(xas, entry))
2109 static unsigned int xas_extract_marked(struct xa_state *xas, void **dst,
2116 xas_for_each_marked(xas, entry, max, mark) {
2117 if (xas_retry(xas, entry))
2159 XA_STATE(xas, xa, start);
2165 return xas_extract_marked(&xas, dst, max, n, filter);
2166 return xas_extract_present(&xas, dst, max, n);
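
xa_extract() batches either all present entries or only marked ones into a caller-supplied buffer, depending on the filter. A sketch pulling at most 16 present entries from the first 1024 indices:

    void *batch[16];
    unsigned int count;

    count = xa_extract(xa, batch, 0, 1023, ARRAY_SIZE(batch), XA_PRESENT);
    /* batch[0..count-1] now hold the entries, lowest index first */
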
2179 struct xa_state xas = {
2189 xas_store(&xas, NULL);
2205 XA_STATE(xas, xa, 0);
2209 xas.xa_node = NULL;
2210 xas_lock_irqsave(&xas, flags);
2213 xas_init_marks(&xas);
2218 xas_free_nodes(&xas, xa_to_node(entry));
2219 xas_unlock_irqrestore(&xas, flags);