Lines Matching refs:slots

12 * Each node type has a number of slots for entries and a number of slots for
23 * The following illustrates the layout of a range64 node's slots and pivots.
777 * ma_slots() - Get a pointer to the maple node slots.
781 * Return: A pointer to the maple node slots.
810 void __rcu **slots, unsigned char offset)
812 return rcu_dereference_check(slots[offset], mt_locked(mt));
815 static inline void *mt_slot_locked(struct maple_tree *mt, void __rcu **slots,
818 return rcu_dereference_protected(slots[offset], mt_write_locked(mt));
823 * @slots: The pointer to the slots
824 * @offset: The offset into the slots array to fetch
826 * Return: The entry stored in @slots at @offset.
828 static inline void *mas_slot_locked(struct ma_state *mas, void __rcu **slots,
831 return mt_slot_locked(mas->tree, slots, offset);
837 * @slots: The pointer to the slots
838 * @offset: The offset into the slots array to fetch
840 * Return: The entry stored in @slots at @offset.
842 static inline void *mas_slot(struct ma_state *mas, void __rcu **slots,
845 return mt_slot(mas->tree, slots, offset);
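
Taken together, lines 777-845 define the slot accessor family: mt_slot() and mas_slot() use rcu_dereference_check() and are meant for readers under rcu_read_lock(), while mt_slot_locked() and mas_slot_locked() use rcu_dereference_protected() and require the tree's write lock. A minimal reader-side sketch (function name hypothetical; only meaningful inside lib/maple_tree.c, where these helpers live):

	/* Hypothetical reader helper: peek at slot 0 of the current node. */
	static void *peek_first_entry(struct ma_state *mas)
	{
		struct maple_node *node = mte_to_node(mas->node);
		enum maple_type type = mte_node_type(mas->node);
		void __rcu **slots = ma_slots(node, type);

		/* Reader context: rcu_read_lock() must be held. */
		return mas_slot(mas, slots, 0);
	}
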
915 void __rcu **slots;
922 slots = mn->mr64.slot;
923 next = mt_slot_locked(mt, slots,
1037 void __rcu **slots;
1042 slots = ma_slots(node, type);
1047 mas->node = mas_slot(mas, slots, mas->offset);
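
Lines 1037-1047 show the canonical descent idiom: resolve the slot array for the node's type, then follow the slot at the current offset down one level. A condensed sketch (helper name hypothetical; assumes mas->offset was already positioned by a pivot search):

	static void descend_one_level(struct ma_state *mas)
	{
		struct maple_node *node = mte_to_node(mas->node);
		enum maple_type type = mte_node_type(mas->node);
		void __rcu **slots = ma_slots(node, type);

		/* Replace the current node with the child at mas->offset. */
		mas->node = mas_slot(mas, slots, mas->offset);
	}
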
1251 void **slots = NULL;
1285 slots = (void **)&node->slot[node->node_count];
1287 count = mt_alloc_bulk(gfp, max_req, slots);
1306 memset(slots, 0, max_req * sizeof(unsigned long));
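
Lines 1251-1306 come from the node allocator: the tail of an already-allocated node's slot array is borrowed as the destination buffer for mt_alloc_bulk(), avoiding a separate scratch allocation. A stitched-together sketch of that flow, keeping the listing's variable names (the failure branch is my reading of the surrounding code, not shown by the matches):

	slots = (void **)&node->slot[node->node_count];
	count = mt_alloc_bulk(gfp, max_req, slots);
	if (!count)	/* assumed failure path: re-zero the borrowed area */
		memset(slots, 0, max_req * sizeof(unsigned long));
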
1498 void __rcu **slots;
1504 slots = ma_slots(mn, mt);
1509 if (slots[i]) {
1527 if (likely(!slots[0])) {
1540 if (unlikely(mas->max == ULONG_MAX) && !slots[max_piv + 1]) {
1548 if (likely(slots[i]))
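
Lines 1498-1548 scan a leaf for gaps: an empty slot i stands for a free range ending at pivot i, with special cases for an empty first slot and for a node whose maximum is ULONG_MAX. A simplified sketch of the scan, with boundary handling reduced and names hypothetical:

	static unsigned long leaf_max_gap_sketch(struct maple_node *mn,
						 enum maple_type mt,
						 unsigned long min,
						 unsigned long max)
	{
		void __rcu **slots = ma_slots(mn, mt);
		unsigned long *pivots = ma_pivots(mn, mt);
		unsigned long gap = 0, pstart = min;
		unsigned char i;

		for (i = 0; i < mt_pivots[mt]; i++) {
			unsigned long pend = pivots[i] ? pivots[i] : max;

			/* An empty slot spans pstart..pend: a candidate gap. */
			/* (ULONG_MAX boundary overflow ignored in this sketch.) */
			if (!slots[i] && pend - pstart + 1 > gap)
				gap = pend - pstart + 1;
			if (!pivots[i] && i)	/* zero pivot past slot 0: end of data */
				break;
			pstart = pend + 1;
		}
		return gap;
	}
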
1709 void __rcu **slots = ma_slots(node, type);
1716 child = mas_slot_locked(mas, slots, offset);
1732 void __rcu **slots;
1741 slots = ma_slots(mte_parent(mas->node),
1743 rcu_assign_pointer(slots[offset], mas->node);
1778 void __rcu **slots;
1782 slots = ma_slots(node, mt);
1786 entry = mas_slot_locked(mas, slots, offset);
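
Lines 1709-1786 cover the parent/child glue: walking children with mas_slot_locked() and publishing a replacement node by storing it into the parent's slot array. The key point at line 1743 is that a fully-built node becomes reachable in a single rcu_assign_pointer(), so RCU readers see either the old child or the new one, never a half-initialised node. Condensed sketch (signature hypothetical; the parent's node type is taken as a parameter because the matched lines elide its lookup):

	static void publish_replacement(struct ma_state *mas,
					enum maple_type ptype,
					unsigned char offset)
	{
		void __rcu **slots = ma_slots(mte_parent(mas->node), ptype);

		/* Single atomic publication point for the new subtree. */
		rcu_assign_pointer(slots[offset], mas->node);
	}
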
1843 * @slot_count: the number of slots in the node being considered.
1948 void __rcu **slots;
1979 slots = ma_slots(node, mt);
1980 memcpy(b_node->slot + mab_start, slots + mas_start, sizeof(void *) * j);
2029 void __rcu **slots = ma_slots(node, mt);
2038 slots[mt_pivots[mt]] = NULL;
2045 memcpy(slots, b_node->slot + mab_start,
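
Lines 1948-2051 are the node/big-node copy routines. They move slot pointers with plain memcpy() rather than per-slot rcu_assign_pointer() because the destination is still private to the writer, and a trailing slot is explicitly cleared (line 2038) so no stale pointer survives. A hypothetical condensation of the build-then-publish pattern:

	static void build_then_publish(void __rcu **live_parent_slot,
				       struct maple_node *new_node,
				       enum maple_type mt, void __rcu **src,
				       unsigned char count,
				       struct maple_enode *new_enode)
	{
		void __rcu **dst = ma_slots(new_node, mt);

		memcpy(dst, src, sizeof(void *) * count); /* private: no RCU barrier */
		rcu_assign_pointer(*live_parent_slot, new_enode); /* publish */
	}
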
2145 wr_mas->content = mas_slot_locked(mas, wr_mas->slots,
2790 void __rcu **slots;
2826 slots = ma_slots(node, type);
2827 next = mt_slot(mas->tree, slots, offset);
2857 * orig_l_mas->last is used in mas_consume to find the slots that will need to
3055 void __rcu **l_slots, **slots;
3078 slots = ma_slots(newnode, mt);
3087 memcpy(slots, l_slots + split + 1, sizeof(void *) * tmp);
3090 memcpy(slots + tmp, ma_slots(node, mt), sizeof(void *) * end);
3107 memset(slots + tmp, 0, sizeof(void *) * (max_s - tmp));
3131 slots = ma_slots(new_left, mt);
3133 memcpy(slots, l_slots, sizeof(void *) * split);
3142 slots = ma_slots(parent, mt);
3145 rcu_assign_pointer(slots[offset], mas->node);
3146 rcu_assign_pointer(slots[offset - 1], l_mas.node);
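
Lines 3055-3146 are from the split path: both replacement siblings are assembled off-line with memcpy(), the unused tail of the new slot array is zeroed (line 3107), and only then are the two siblings installed into adjacent parent slots (lines 3145-3146). A reader racing between the two stores can still descend into an old node; my understanding is that the old nodes are subsequently marked dead, so such a reader detects this and retries. Stitched fragment with that reading as comments:

	slots = ma_slots(parent, mt);
	/* Publish right sibling, then left; each store is atomic for readers. */
	rcu_assign_pointer(slots[offset], mas->node);
	rcu_assign_pointer(slots[offset - 1], l_mas.node);
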
3369 * accomplish the balance, there are empty slots remaining in both left
3454 memset(wr_mas->slots + bn->b_end, 0, sizeof(void *) * clear--);
3511 void __rcu **slots;
3521 slots = ma_slots(node, type);
3527 rcu_assign_pointer(slots[slot], contents);
3534 rcu_assign_pointer(slots[slot], entry);
3593 * rightmost node (writing ULONG_MAX), otherwise it spans slots.
3607 wr_mas->slots = ma_slots(wr_mas->node, wr_mas->type);
3635 wr_mas->content = mas_slot_locked(mas, wr_mas->slots,
3652 wr_mas->content = mas_slot_locked(mas, wr_mas->slots,
3679 !mas_slot_locked(l_mas, l_wr_mas->slots, l_slot - 1))) {
3694 !mas_slot_locked(r_mas, r_wr_mas->slots, r_mas->offset + 1)) {
3731 void __rcu **slots;
3752 slots = ma_slots(node, type);
3753 next = mt_slot(mas->tree, slots, offset);
3781 void __rcu **slots;
3798 slots = ma_slots(node, type);
3801 rcu_assign_pointer(slots[0], entry);
3958 memcpy(dst_slots, wr_mas->slots, sizeof(void *) * mas->offset);
3981 memcpy(dst_slots + dst_offset, wr_mas->slots + offset_end,
4014 void __rcu **slots = wr_mas->slots;
4017 gap |= !mt_slot_locked(mas->tree, slots, offset);
4018 gap |= !mt_slot_locked(mas->tree, slots, offset + 1);
4023 rcu_assign_pointer(slots[offset], wr_mas->entry);
4027 rcu_assign_pointer(slots[offset + 1], wr_mas->entry);
4036 gap |= !mt_slot_locked(mas->tree, slots, offset + 2);
4037 rcu_assign_pointer(slots[offset + 1], wr_mas->entry);
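
Lines 4014-4037 handle a store that lands within an existing node's slots: before overwriting, the writer tests whether the neighbouring slots are empty (a NULL slot is a gap), because creating or filling a gap obliges it to update the parent's gap metadata. Condensed fragment, keeping the listing's names (branch structure simplified):

	gap |= !mt_slot_locked(mas->tree, slots, offset);
	gap |= !mt_slot_locked(mas->tree, slots, offset + 1);
	/* Entry publication is per-slot; readers see the old or new entry. */
	rcu_assign_pointer(slots[offset], wr_mas->entry);
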
4060 if (!wr_mas->slots[wr_mas->offset_end]) {
4067 !wr_mas->slots[wr_mas->offset_end + 1]) {
4083 !wr_mas->slots[mas->offset - 1]) {
4137 void __rcu **slots;
4156 slots = wr_mas->slots;
4160 rcu_assign_pointer(slots[new_end], wr_mas->entry);
4165 rcu_assign_pointer(slots[new_end], wr_mas->content);
4167 rcu_assign_pointer(slots[end], wr_mas->entry);
4171 rcu_assign_pointer(slots[new_end], wr_mas->content);
4173 rcu_assign_pointer(slots[end + 1], wr_mas->entry);
4208 rcu_assign_pointer(wr_mas->slots[mas->offset], wr_mas->entry);
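
Lines 4137-4208 are the append fast path: a store that only adds to the end of a node writes the new slot(s) and adjusts pivots without rebuilding the node. From the outside this is all driven by the public store API; a minimal kernel-context usage example (whether a given store actually takes the append or slot-store path depends on the tree's current shape):

	#include <linux/maple_tree.h>
	#include <linux/xarray.h>	/* xa_mk_value() */

	static DEFINE_MTREE(example_mt);

	static int example_store(void)
	{
		int ret;

		ret = mtree_store_range(&example_mt, 10, 19, xa_mk_value(1),
					GFP_KERNEL);
		if (ret)
			return ret;
		/* Overwrite part of 10-19; typically an in-node slot store. */
		return mtree_store_range(&example_mt, 12, 15, xa_mk_value(2),
					 GFP_KERNEL);
	}
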
4363 void __rcu **slots;
4393 slots = ma_slots(node, mt);
4394 mas->node = mas_slot(mas, slots, offset);
4406 slots = ma_slots(node, mt);
4407 mas->node = mas_slot(mas, slots, offset);
4443 void __rcu **slots;
4488 slots = ma_slots(node, type);
4489 entry = mas_slot(mas, slots, mas->offset);
4528 void __rcu **slots;
4553 slots = ma_slots(node, mt);
4555 enode = mas_slot(mas, slots, mas->offset);
4567 slots = ma_slots(node, mt);
4568 enode = mas_slot(mas, slots, 0);
4606 void __rcu **slots;
4664 slots = ma_slots(node, type);
4665 entry = mt_slot(mas->tree, slots, mas->offset);
4727 void __rcu **slots;
4742 slots = ma_slots(node, type);
4755 else if (!mas_slot(mas, slots, offset))
4793 mas->node = mas_slot(mas, slots, offset);
4814 void __rcu **slots;
4825 slots = ma_slots(node, type);
4839 else if (!mas_slot(mas, slots, offset))
4850 mas->node = mas_slot(mas, slots, offset);
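
Lines 4363-4850 implement node-to-node navigation: the walkers test slots with !mas_slot(...) so empty slots are treated as gaps and skipped. The reader-visible effect is that iteration only ever visits stored entries; a minimal example with the public iterator:

	static void example_iterate(struct maple_tree *mt)
	{
		MA_STATE(mas, mt, 0, 0);
		void *entry;

		rcu_read_lock();
		mas_for_each(&mas, entry, ULONG_MAX)
			pr_info("range %lu-%lu -> %p\n", mas.index, mas.last,
				entry);
		rcu_read_unlock();
	}
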
5132 * @slots: Pointer to the slot array
5141 void __rcu **slots)
5149 entry = mt_slot(mt, slots, offset);
5158 rcu_assign_pointer(slots[offset], node);
5174 void __rcu **slots = NULL;
5180 slots = ma_slots(node, node->type);
5181 next = rcu_dereference_protected(slots[offset],
5186 return slots;
5197 void __rcu **slots;
5210 slots = mte_dead_walk(&enode, 0);
5213 mt_free_bulk(node->slot_len, slots);
5220 slots = ma_slots(mte_to_node(enode), type);
5222 rcu_dereference_protected(slots[offset],
5224 slots = mte_dead_walk(&enode, offset);
5228 slots = ma_slots(node, node->type);
5229 mt_free_bulk(node->slot_len, slots);
5240 void __rcu **slots = NULL;
5248 slots = ma_slots(node, type);
5249 next = mt_slot_locked(mt, slots, next_offset);
5251 next = mt_slot_locked(mt, slots, ++next_offset);
5262 return slots;
5268 void __rcu **slots;
5278 slots = mte_destroy_descend(&enode, mt, start, 0);
5285 node->slot_len = mte_dead_leaves(enode, mt, slots);
5287 mt_free_bulk(node->slot_len, slots);
5294 slots = ma_slots(mte_to_node(enode), type);
5298 tmp = mt_slot_locked(mt, slots, offset);
5302 slots = mte_destroy_descend(&enode, mt, parent, offset);
5309 node->slot_len = mte_dead_leaves(enode, mt, slots);
5311 mt_free_bulk(node->slot_len, slots);
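
Lines 5132-5311 are the teardown walk: descend to the leaves (mte_destroy_descend()), record how many slots each dead node held in node->slot_len, then release children a node at a time with mt_free_bulk(). All of this sits behind the public destructor; reusing the example_mt tree from the earlier snippet:

	static void example_teardown(void)
	{
		/* Frees every node via the bulk destroy walk shown above. */
		mtree_destroy(&example_mt);
	}
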
5636 /* Leaves; reduce slots to keep space for expansion */
7020 void __rcu **slots;
7029 slots = ma_slots(parent, p_type);
7035 node = mas_slot(mas, slots, i);
7052 void __rcu **slots = ma_slots(mte_to_node(mas->node), type);
7061 child = mas_slot(mas, slots, i);
7090 * where the maximum ends and ensure there are no slots or pivots set outside of
7098 void __rcu **slots = ma_slots(mte_to_node(mas->node), type);
7140 void *entry = mas_slot(mas, slots, i);
7165 void __rcu **slots;
7175 slots = ma_slots(mte_to_node(mas.node), mte_node_type(mas.node));
7177 entry = mas_slot(&mas, slots, offset);
7189 slots = ma_slots(mte_to_node(mas.node),
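
Lines 7020-7189 are the debug validators: they walk every slot, check parent back-pointers, and ensure nothing is set past the node's logical end (line 7090). A hypothetical condensation of the child back-link check:

	static void check_child_backlinks(struct ma_state *mas)
	{
		enum maple_type type = mte_node_type(mas->node);
		void __rcu **slots = ma_slots(mte_to_node(mas->node), type);
		unsigned char i;

		for (i = 0; i < mt_slots[type]; i++) {
			struct maple_enode *child = mas_slot(mas, slots, i);

			if (!child)
				break;
			/* Child must point back to this node at this offset. */
			WARN_ON(mte_parent(child) != mte_to_node(mas->node));
			WARN_ON(mte_parent_slot(child) != i);
		}
	}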