Lines Matching refs:prealloc

579  * struct 'prealloc' as the newly created second half.  'split' indicates an
585 * prealloc: [orig->start, split - 1]
592 struct extent_state *prealloc, u64 split)
599 prealloc->start = orig->start;
600 prealloc->end = split - 1;
601 prealloc->state = orig->state;
604 node = tree_insert(&tree->state, &orig->rb_node, prealloc->end,
605 &prealloc->rb_node, NULL, NULL);
607 free_extent_state(prealloc);
669 alloc_extent_state_atomic(struct extent_state *prealloc)
671 if (!prealloc)
672 prealloc = alloc_extent_state(GFP_ATOMIC);
674 return prealloc;
702 struct extent_state *prealloc = NULL;
720 if (!prealloc && gfpflags_allow_blocking(mask)) {
728 prealloc = alloc_extent_state(mask);
787 prealloc = alloc_extent_state_atomic(prealloc);
788 BUG_ON(!prealloc);
789 err = split_state(tree, state, prealloc, start);
793 prealloc = NULL;
810 prealloc = alloc_extent_state_atomic(prealloc);
811 BUG_ON(!prealloc);
812 err = split_state(tree, state, prealloc, end + 1);
819 clear_state_bit(tree, prealloc, &bits, wake, changeset);
821 prealloc = NULL;
843 if (prealloc)
844 free_extent_state(prealloc);
970 struct extent_state *prealloc = NULL;
982 if (!prealloc && gfpflags_allow_blocking(mask)) {
990 prealloc = alloc_extent_state(mask);
1008 prealloc = alloc_extent_state_atomic(prealloc);
1009 BUG_ON(!prealloc);
1010 err = insert_state(tree, prealloc, start, end,
1015 cache_state(prealloc, cached_state);
1016 prealloc = NULL;
1083 prealloc = alloc_extent_state_atomic(prealloc);
1084 BUG_ON(!prealloc);
1085 err = split_state(tree, state, prealloc, start);
1089 prealloc = NULL;
1120 prealloc = alloc_extent_state_atomic(prealloc);
1121 BUG_ON(!prealloc);
1124 * Avoid freeing 'prealloc' if it can be merged with
1127 err = insert_state(tree, prealloc, start, this_end,
1132 cache_state(prealloc, cached_state);
1133 prealloc = NULL;
1150 prealloc = alloc_extent_state_atomic(prealloc);
1151 BUG_ON(!prealloc);
1152 err = split_state(tree, state, prealloc, end + 1);
1156 set_state_bits(tree, prealloc, &bits, changeset);
1157 cache_state(prealloc, cached_state);
1158 merge_state(tree, prealloc);
1159 prealloc = NULL;
1173 if (prealloc)
1174 free_extent_state(prealloc);
1212 struct extent_state *prealloc = NULL;
1226 if (!prealloc) {
1234 prealloc = alloc_extent_state(GFP_NOFS);
1235 if (!prealloc && !first_iteration)
1255 prealloc = alloc_extent_state_atomic(prealloc);
1256 if (!prealloc) {
1260 err = insert_state(tree, prealloc, start, end,
1264 cache_state(prealloc, cached_state);
1265 prealloc = NULL;
1309 prealloc = alloc_extent_state_atomic(prealloc);
1310 if (!prealloc) {
1314 err = split_state(tree, state, prealloc, start);
1317 prealloc = NULL;
1348 prealloc = alloc_extent_state_atomic(prealloc);
1349 if (!prealloc) {
1355 * Avoid freeing 'prealloc' if it can be merged with
1358 err = insert_state(tree, prealloc, start, this_end,
1362 cache_state(prealloc, cached_state);
1363 prealloc = NULL;
1374 prealloc = alloc_extent_state_atomic(prealloc);
1375 if (!prealloc) {
1380 err = split_state(tree, state, prealloc, end + 1);
1384 set_state_bits(tree, prealloc, &bits, NULL);
1385 cache_state(prealloc, cached_state);
1386 clear_state_bit(tree, prealloc, &clear_bits, 0, NULL);
1387 prealloc = NULL;
1401 if (prealloc)
1402 free_extent_state(prealloc);
4609 * So regular extent won't get merged with prealloc extent