Lines matching defs:run
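Every fragment below is consistent with fs/ntfs3/attrib.c in the Linux ntfs3 driver, and every run is a struct runs_tree: the in-memory, sorted cache of extents mapping an attribute's virtual cluster numbers (VCNs) to logical cluster numbers (LCNs) on the volume. As a reference point for the run_lookup_entry() calls that dominate the listing, here is a minimal userspace sketch of that mapping; extent, toy_run and toy_lookup are illustrative names, not the kernel's.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

/* Hypothetical, simplified model of struct runs_tree: a sorted array
 * of extents, each mapping a contiguous VCN range onto a contiguous
 * LCN range. The kernel structure packs its entries differently. */
struct extent { uint64_t vcn, lcn, len; };
struct toy_run { struct extent *ext; size_t count; };

/* Analogue of run_lookup_entry(): find the extent containing @vcn and
 * report the mapped LCN plus the clusters remaining in that extent. */
static bool toy_lookup(const struct toy_run *run, uint64_t vcn,
                       uint64_t *lcn, uint64_t *len)
{
        for (size_t i = 0; i < run->count; i++) {
                const struct extent *e = &run->ext[i];

                if (vcn >= e->vcn && vcn < e->vcn + e->len) {
                        *lcn = e->lcn + (vcn - e->vcn);
                        *len = e->len - (vcn - e->vcn);
                        return true;
                }
        }
        return false;   /* not cached yet: not necessarily an error */
}

A false return from the lookup is usually not an error: it means that part of the run list has not been unpacked yet, which is why misses below are answered with attr_load_runs*() and a retry.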
61 struct runs_tree *run, const CLST *vcn)
69 if (svcn >= evcn + 1 || run_is_mapped_full(run, svcn, evcn))
81 err = run_unpack_ex(run, ni->mi.sbi, ni->mi.rno, svcn, evcn,
93 static int run_deallocate_ex(struct ntfs_sb_info *sbi, struct runs_tree *run,
103 if (!run_lookup_entry(run, vcn, &lcn, &clen, &idx)) {
105 run_truncate(run, vcn0);
132 if (!run_get_entry(run, ++idx, &vcn, &lcn, &clen) ||
134 /* Save memory - don't load entire run. */
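run_deallocate_ex() (lines 93-134 above) walks the cached extents by index via run_get_entry() and hands each mapped piece back to the allocator; the comment at line 134 notes that it deliberately avoids loading the entire run at once. A sketch of that walk over the same toy extent model; toy_free_clusters is an injected stand-in for the kernel's cluster-freeing helpers, and sparse extents plus error handling are omitted.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

struct extent { uint64_t vcn, lcn, len; };
struct toy_run { struct extent *ext; size_t count; };

/* Analogue of run_get_entry(): fetch the extent at @idx, if any. */
static bool toy_get_entry(const struct toy_run *run, size_t idx,
                          uint64_t *vcn, uint64_t *lcn, uint64_t *len)
{
        if (idx >= run->count)
                return false;
        *vcn = run->ext[idx].vcn;
        *lcn = run->ext[idx].lcn;
        *len = run->ext[idx].len;
        return true;
}

/* Shape of the deallocation walk: starting at index @idx, free every
 * mapped extent (clamped) that lies below @end_vcn. */
static void toy_dealloc(const struct toy_run *run, size_t idx,
                        uint64_t end_vcn,
                        void (*toy_free_clusters)(uint64_t lcn, uint64_t len))
{
        uint64_t vcn, lcn, len;

        while (toy_get_entry(run, idx++, &vcn, &lcn, &len) && vcn < end_vcn) {
                if (len > end_vcn - vcn)
                        len = end_vcn - vcn;
                toy_free_clusters(lcn, len);
        }
}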
147 * attr_allocate_clusters - Find free space, mark it as used and store in @run.
149 int attr_allocate_clusters(struct ntfs_sb_info *sbi, struct runs_tree *run,
156 size_t cnt = run->count;
180 /* Add new fragment into run storage. */
181 if (!run_add_entry(run, vcn, lcn, flen, opt & ALLOCATE_MFT)) {
202 (fr && run->count - cnt >= fr)) {
213 run_deallocate_ex(sbi, run, vcn0, vcn - vcn0, NULL, false);
214 run_truncate(run, vcn0);
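attr_allocate_clusters() (lines 147-214) appends each freshly allocated fragment with run_add_entry() and, if allocation or insertion fails partway, undoes its own work with run_deallocate_ex() followed by run_truncate() back to the starting VCN (lines 213-214), so a failed extension leaves no half-added fragments behind. The same control flow reduced to a sketch; the toy_ops callbacks are illustrative stand-ins, not kernel names, and the allocator is assumed to return at most the requested length.

#include <stdbool.h>
#include <stdint.h>

/* Injected stand-ins for ntfs_look_for_free_space(), run_add_entry(),
 * run_deallocate_ex() and run_truncate(). */
struct toy_ops {
        bool (*alloc)(uint64_t want, uint64_t *lcn, uint64_t *flen);
        bool (*add)(uint64_t vcn, uint64_t lcn, uint64_t flen);
        void (*dealloc)(uint64_t vcn, uint64_t len);
        void (*truncate)(uint64_t vcn);
};

/* Shape of attr_allocate_clusters(): allocate fragment by fragment,
 * and on any failure roll back to @vcn0 (mirrors lines 213-214). */
static int toy_allocate_clusters(const struct toy_ops *ops,
                                 uint64_t vcn0, uint64_t total)
{
        uint64_t vcn = vcn0;

        while (vcn < vcn0 + total) {
                uint64_t lcn, flen;

                if (!ops->alloc(vcn0 + total - vcn, &lcn, &flen) ||
                    !ops->add(vcn, lcn, flen))
                        goto undo;
                vcn += flen;
        }
        return 0;

undo:
        ops->dealloc(vcn0, vcn - vcn0);
        ops->truncate(vcn0);
        return -1;      /* -ENOMEM/-ENOSPC in the kernel */
}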
228 u64 new_size, struct runs_tree *run,
260 run_init(run);
275 err = attr_allocate_clusters(sbi, run, 0, 0, len, NULL,
284 err = ntfs_sb_write_run(sbi, run, 0, data, rsize, 0);
316 attr_s->name_len, run, 0, alen,
341 run_deallocate(sbi, run, false);
342 run_close(run);
354 u64 new_size, struct runs_tree *run,
372 run, ins_attr, NULL);
401 const __le16 *name, u8 name_len, struct runs_tree *run,
432 err = attr_set_size_res(ni, attr_b, le_b, mi_b, new_size, run,
500 err = attr_load_runs(attr, ni, run, NULL);
517 * - update packed run in 'mi'
542 !run_lookup_entry(run, vcn, &lcn, NULL, NULL)) {
564 if (!run_add_entry(run, vcn, SPARSE_LCN, to_allocate,
573 sbi, run, vcn, lcn, to_allocate, &pre_alloc,
593 err = mi_pack_runs(mi, attr, run, vcn - svcn);
651 err = ni_insert_nonresident(ni, type, name, name_len, run,
674 run_truncate_head(run, evcn + 1);
694 * - update packed run in 'mi'
705 err = mi_pack_runs(mi, attr, run, vcn - svcn);
750 err = run_deallocate_ex(sbi, run, vcn, evcn - vcn + 1, &dlen,
761 run_truncate(run, vcn);
856 if (mi_pack_runs(mi, attr, run, evcn - svcn + 1))
860 run_deallocate_ex(sbi, run, vcn, alen, NULL, false);
862 run_truncate(run, vcn);
886 struct runs_tree *run = &ni->file.run;
903 if (!run_lookup_entry(run, vcn, lcn, len, NULL))
918 if (!run_lookup_entry(run, vcn, lcn, len, NULL))
971 err = attr_load_runs(attr, ni, run, NULL);
976 if (run_lookup_entry(run, vcn, lcn, len, NULL)) {
1023 err = attr_load_runs(attr, ni, run, NULL);
1036 if (!run_add_entry(run, evcn1, SPARSE_LCN, vcn - evcn1,
1041 } else if (vcn && !run_lookup_entry(run, vcn - 1, &hint, NULL, NULL)) {
1046 err = attr_allocate_clusters(sbi, run, vcn, hint + 1, to_alloc, NULL,
1060 if (!run_lookup_entry(run, vcn0, lcn, len, NULL)) {
1076 err = mi_pack_runs(mi, attr, run, max(end, evcn1) - svcn);
1178 err = attr_load_runs(attr, ni, run, &end);
1184 err = mi_pack_runs(mi, attr, run, evcn1 - next_svcn);
1195 err = ni_insert_nonresident(ni, ATTR_DATA, NULL, 0, run,
1202 run_truncate_around(run, vcn);
1218 if (run_deallocate_ex(sbi, run, vcn, alen, NULL, false) ||
1219 !run_add_entry(run, vcn, SPARSE_LCN, alen, false) ||
1220 mi_pack_runs(mi, attr, run, max(end, evcn1) - svcn)) {
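The paired run_lookup_entry() calls (lines 903 and 918 above, and again around 1354 and 1538 below) are all one idiom: a miss in the cached run list triggers attr_load_runs_vcn() or attr_load_runs() to unpack the mapping pairs covering that VCN, and only a second miss is treated as corruption. Factored out as a sketch, with lookup and load as injected stand-ins:

#include <stdbool.h>
#include <stdint.h>

/* Injected stand-ins for run_lookup_entry() and attr_load_runs_vcn(). */
struct toy_map_ops {
        bool (*lookup)(uint64_t vcn, uint64_t *lcn, uint64_t *len);
        int (*load)(uint64_t vcn);
};

/* The miss-load-retry idiom used throughout the listing: a miss in
 * the cached run list is answered by unpacking the mapping pairs that
 * cover @vcn and looking again; only the second miss is an error. */
static int toy_vcn_to_lcn(const struct toy_map_ops *ops, uint64_t vcn,
                          uint64_t *lcn, uint64_t *len)
{
        if (ops->lookup(vcn, lcn, len))
                return 0;

        if (ops->load(vcn))
                return -1;

        return ops->lookup(vcn, lcn, len) ? 0 : -1;
}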
1300 const __le16 *name, u8 name_len, struct runs_tree *run,
1332 err = run_unpack_ex(run, ni->mi.sbi, ni->mi.rno, svcn, evcn, svcn,
1343 const __le16 *name, u8 name_len, struct runs_tree *run,
1354 if (!run_lookup_entry(run, vcn, &lcn, &clen, NULL)) {
1355 err = attr_load_runs_vcn(ni, type, name, name_len, run,
1373 struct runs_tree *run, u64 frame, u64 frames,
1458 ARRAY_SIZE(WOF_NAME), run,
1463 err = ntfs_bio_pages(sbi, run, &page, 1, from,
1524 struct runs_tree *run;
1536 run = &ni->file.run;
1538 if (!run_lookup_entry(run, vcn, &lcn, &clen, &idx)) {
1540 attr->name_len, run, vcn);
1544 if (!run_lookup_entry(run, vcn, &lcn, &clen, &idx))
1573 if (!run_get_entry(run, ++idx, &vcn, &lcn, &clen) ||
1577 attr->name_len, run, vcn_next);
1582 if (!run_lookup_entry(run, vcn, &lcn, &clen, &idx))
1625 struct runs_tree *run = &ni->file.run;
1668 err = attr_load_runs(attr, ni, run, NULL);
1684 err = run_deallocate_ex(sbi, run, vcn + len, clst_data - len,
1689 if (!run_add_entry(run, vcn + len, SPARSE_LCN, clst_data - len,
1700 !run_lookup_entry(run, vcn + clst_data - 1, &hint, NULL,
1705 err = attr_allocate_clusters(sbi, run, vcn + clst_data,
1719 err = mi_pack_runs(mi, attr, run, max(end, evcn1) - svcn);
1803 err = attr_load_runs(attr, ni, run, &end);
1809 err = mi_pack_runs(mi, attr, run, evcn1 - next_svcn);
1821 err = ni_insert_nonresident(ni, ATTR_DATA, NULL, 0, run,
1828 run_truncate_around(run, vcn);
1850 struct runs_tree *run = &ni->file.run;
1901 err = attr_set_size(ni, ATTR_DATA, NULL, 0, &ni->file.run, vbo,
1956 err = attr_load_runs(attr, ni, run, &svcn);
1962 err = run_deallocate_ex(sbi, run, vcn1, eat, &dealloc,
1967 if (!run_collapse_range(run, vcn1, eat)) {
1981 err = mi_pack_runs(mi, attr, run, evcn1 - svcn - eat);
1988 ni, ATTR_DATA, NULL, 0, run, next_svcn,
1999 run_truncate(run, 0);
2108 struct runs_tree *run = &ni->file.run;
2170 run_truncate(run, 0);
2208 err = attr_load_runs(attr, ni, run, &svcn);
2219 err = run_deallocate_ex(NULL, run, vcn1, zero, &hole2, false);
2227 /* Make a clone of run to undo. */
2228 err = run_clone(run, &run2);
2233 if (!run_add_entry(run, vcn1, SPARSE_LCN, zero, false)) {
2238 /* Update run in attribute segment. */
2239 err = mi_pack_runs(mi, attr, run, evcn1 - svcn);
2245 err = ni_insert_nonresident(ni, ATTR_DATA, NULL, 0, run,
2261 run_truncate(run, 0);
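Two details of the hole-punch fragments above (lines 2219-2261) are worth spelling out: a hole is created by adding an extent mapped to SPARSE_LCN (line 2233), not by deleting one, and the run is cloned first (line 2228) so the original mapping can be restored should mi_pack_runs() fail. A sketch of that clone-and-restore pattern over the toy model; punch and pack are injected stand-ins for the run_add_entry(..., SPARSE_LCN, ...) and mi_pack_runs() steps.

#include <stdbool.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>

struct extent { uint64_t vcn, lcn, len; };
struct toy_run { struct extent *ext; size_t count; };

/* Analogue of run_clone(): deep-copy the extent array so the caller
 * holds an undo image. */
static int toy_clone(const struct toy_run *src, struct toy_run *dst)
{
        dst->ext = malloc(src->count * sizeof(*dst->ext));
        if (!dst->ext && src->count)
                return -1;
        if (src->count)
                memcpy(dst->ext, src->ext, src->count * sizeof(*dst->ext));
        dst->count = src->count;
        return 0;
}

/* Shape of the punch-hole undo: clone, mutate, and swap the clone
 * back in if packing the runs into the attribute record fails. */
static int toy_punch_hole(struct toy_run *run,
                          bool (*punch)(struct toy_run *),
                          int (*pack)(struct toy_run *))
{
        struct toy_run undo;

        if (toy_clone(run, &undo))
                return -1;

        if (!punch(run) || pack(run)) {
                free(run->ext);
                *run = undo;    /* restore the pre-punch mapping */
                return -1;
        }

        free(undo.ext);
        return 0;
}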
2326 struct runs_tree *run = &ni->file.run;
2382 err = attr_set_size(ni, ATTR_DATA, NULL, 0, run,
2438 run_truncate(run, 0); /* clear cached values. */
2439 err = attr_load_runs(attr, ni, run, NULL);
2443 if (!run_insert_range(run, vcn, len)) {
2449 err = mi_pack_runs(mi, attr, run, evcn1 + len - svcn);
2467 err = ni_insert_nonresident(ni, ATTR_DATA, NULL, 0, run,
2507 run_truncate(run, 0); /* clear cached values. */
2539 if (attr_load_runs(attr, ni, run, NULL))
2542 if (!run_collapse_range(run, vcn, len))
2545 if (mi_pack_runs(mi, attr, run, evcn1 + len - svcn))
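Finally, run_insert_range() (line 2443) and run_collapse_range() (lines 1967 and 2542) are pure edits of the cached extent map: VCNs slide up or down so the callers can describe the new gap, or close the old one, before mi_pack_runs() writes the result back. The insert case in the toy model, assuming vcn falls on an extent boundary (the kernel version also splits a straddling extent):

#include <stddef.h>
#include <stdint.h>

struct extent { uint64_t vcn, lcn, len; };
struct toy_run { struct extent *ext; size_t count; };

/* Rough effect of run_insert_range(): every extent at or beyond @vcn
 * slides up by @len VCNs, opening an unmapped gap that the caller
 * then describes as sparse or newly allocated clusters. The reverse
 * shift, after a deallocation pass, is run_collapse_range(). */
static void toy_insert_range(struct toy_run *run, uint64_t vcn, uint64_t len)
{
        for (size_t i = 0; i < run->count; i++)
                if (run->ext[i].vcn >= vcn)
                        run->ext[i].vcn += len;
}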