Lines matching refs:depth (the source appears to be fs/ext4/extents.c in the Linux kernel; the leading number on each matched line is its line number in that file)
111 int depth, i;
115 depth = path->p_depth;
116 for (i = 0; i <= depth; i++, path++) {
215 int depth = path->p_depth;
235 ex = path[depth].p_ext;
248 if (path[depth].p_bh)
249 return path[depth].p_bh->b_blocknr;
344 ext4_ext_max_entries(struct inode *inode, int depth)
348 if (depth == ext_depth(inode)) {
349 if (depth == 0)
354 if (depth == 0)
390 int depth)
401 if (depth == 0) {
409 if (depth != ext_depth(inode) &&
433 if (depth != ext_depth(inode) &&
456 int depth, ext4_fsblk_t pblk, ext4_lblk_t lblk)
465 if (unlikely(le16_to_cpu(eh->eh_depth) != depth)) {
473 max = ext4_ext_max_entries(inode, depth);
482 if (unlikely((eh->eh_entries == 0) && (depth > 0))) {
486 if (!ext4_valid_extent_entries(inode, eh, lblk, &pblk, depth)) {
490 if (unlikely(depth > 32)) {
495 if (ext_depth(inode) != depth &&
506 "entries %u, max %u(%u), depth %u(%u)",
511 max, le16_to_cpu(eh->eh_depth), depth);
515 #define ext4_ext_check(inode, eh, depth, pblk) \
516 __ext4_ext_check(__func__, __LINE__, (inode), (eh), (depth), (pblk), 0)
550 int depth, int flags)
574 depth, pblk, le32_to_cpu(idx->ei_block));
581 if (!(flags & EXT4_EX_NOCACHE) && depth == 0) {
592 #define read_extent_tree_block(inode, idx, depth, flags) \
594 (depth), (flags))
605 int i = 0, depth, ret = 0;
611 depth = ext_depth(inode);
614 if (!depth) {
619 path = kcalloc(depth + 1, sizeof(struct ext4_ext_path),
627 ret = ext4_ext_check(inode, path[0].p_hdr, depth, 0);
636 if ((i == depth) ||
644 depth - i - 1,
687 int depth = ext_depth(inode);
695 eh = path[depth].p_hdr;
711 int depth = ext_depth(inode);
714 if (depth != level) {
727 ex = path[depth].p_ext;
728 while (ex <= EXT_MAX_EXTENT(path[depth].p_hdr)) {
889 short int depth, i, ppos = 0;
897 depth = ext_depth(inode);
898 if (depth < 0 || depth > EXT4_MAX_EXTENT_DEPTH) {
899 EXT4_ERROR_INODE(inode, "inode has invalid extent depth: %d",
900 depth);
907 if (depth > path[0].p_maxdepth) {
913 /* account possible depth increase */
914 path = kcalloc(depth + 2, sizeof(struct ext4_ext_path),
918 path[0].p_maxdepth = depth + 1;
923 i = depth;
924 if (!(flags & EXT4_EX_NOCACHE) && depth == 0)
928 ext_debug(inode, "depth %d: num %d, max %d\n",
1045 * at depth @at:
1058 int depth = ext_depth(inode);
1077 if (unlikely(path[depth].p_ext > EXT_MAX_EXTENT(path[depth].p_hdr))) {
1081 if (path[depth].p_ext != EXT_MAX_EXTENT(path[depth].p_hdr)) {
1082 border = path[depth].p_ext[1].ee_block;
1105 ablocks = kcalloc(depth, sizeof(ext4_fsblk_t), gfp_flags);
1110 ext_debug(inode, "allocate %d blocks for indexes/leaf\n", depth - at);
1111 for (a = 0; a < depth - at; a++) {
1145 /* move remainder of path[depth] to the new leaf */
1146 if (unlikely(path[depth].p_hdr->eh_entries !=
1147 path[depth].p_hdr->eh_max)) {
1149 path[depth].p_hdr->eh_entries,
1150 path[depth].p_hdr->eh_max);
1155 m = EXT_MAX_EXTENT(path[depth].p_hdr) - path[depth].p_ext++;
1156 ext4_ext_show_move(inode, path, newblock, depth);
1160 memmove(ex, path[depth].p_ext, sizeof(struct ext4_extent) * m);
1180 err = ext4_ext_get_access(handle, inode, path + depth);
1183 le16_add_cpu(&path[depth].p_hdr->eh_entries, -m);
1184 err = ext4_ext_dirty(handle, inode, path + depth);
1191 k = depth - at - 1;
1200 /* current depth stored in i var */
1201 i = depth - 1;
1221 neh->eh_depth = cpu_to_le16(depth - i);
1291 for (i = 0; i < depth; i++) {
1396 * if no free index is found, then it requests in-depth growing.
1406 int depth, i, err = 0;
1409 i = depth = ext_depth(inode);
1412 curp = path + depth;
1434 /* tree is full, time to grow in depth */
1449 * only first (depth 0 -> 1) produces free space;
1452 depth = ext_depth(inode);
1453 if (path[depth].p_hdr->eh_entries == path[depth].p_hdr->eh_max) {
1476 int depth, ee_len;
1482 depth = path->p_depth;
1485 if (depth == 0 && path->p_ext == NULL)
1492 ex = path[depth].p_ext;
1495 if (unlikely(EXT_FIRST_EXTENT(path[depth].p_hdr) != ex)) {
1501 while (--depth >= 0) {
1502 ix = path[depth].p_idx;
1503 if (unlikely(ix != EXT_FIRST_INDEX(path[depth].p_hdr))) {
1505 "ix (%d) != EXT_FIRST_INDEX (%d) (depth %d)!",
1507 le32_to_cpu(EXT_FIRST_INDEX(path[depth].p_hdr)->ei_block),
1508 depth);
1543 int depth; /* Note, NOT eh_depth; depth from top of tree */
1550 depth = path->p_depth;
1553 if (depth == 0 && path->p_ext == NULL)
1560 ex = path[depth].p_ext;
1563 if (unlikely(EXT_FIRST_EXTENT(path[depth].p_hdr) != ex)) {
1566 depth);
1569 while (--depth >= 0) {
1570 ix = path[depth].p_idx;
1571 if (unlikely(ix != EXT_FIRST_INDEX(path[depth].p_hdr))) {
1588 if (ex != EXT_LAST_EXTENT(path[depth].p_hdr)) {
1595 while (--depth >= 0) {
1596 ix = path[depth].p_idx;
1597 if (ix != EXT_LAST_INDEX(path[depth].p_hdr))
1609 while (++depth < path->p_depth) {
1611 bh = read_extent_tree_block(inode, ix, path->p_depth - depth, 0);
1619 bh = read_extent_tree_block(inode, ix, path->p_depth - depth, 0);
1644 int depth;
1647 depth = path->p_depth;
1649 if (depth == 0 && path->p_ext == NULL)
1652 while (depth >= 0) {
1653 struct ext4_ext_path *p = &path[depth];
1655 if (depth == path->p_depth) {
1664 depth--;
1676 int depth;
1679 depth = path->p_depth;
1682 if (depth == 0)
1686 depth--;
1688 while (depth >= 0) {
1689 if (path[depth].p_idx !=
1690 EXT_LAST_INDEX(path[depth].p_hdr))
1692 le32_to_cpu(path[depth].p_idx[1].ei_block);
1693 depth--;
1709 int depth = ext_depth(inode);
1714 eh = path[depth].p_hdr;
1715 ex = path[depth].p_ext;
1723 if (depth == 0) {
1736 k = depth - 1;
1737 border = path[depth].p_ext->ee_block;
1806 unsigned int depth, len;
1809 depth = ext_depth(inode);
1810 BUG_ON(path[depth].p_hdr == NULL);
1811 eh = path[depth].p_hdr;
1894 unsigned int depth;
1897 depth = ext_depth(inode);
1898 BUG_ON(path[depth].p_hdr == NULL);
1899 eh = path[depth].p_hdr;
1924 unsigned int depth, len1;
1929 depth = ext_depth(inode);
1930 if (!path[depth].p_ext)
1932 b2 = EXT4_LBLK_CMASK(sbi, le32_to_cpu(path[depth].p_ext->ee_block));
1976 int depth, len, err;
1986 depth = ext_depth(inode);
1987 ex = path[depth].p_ext;
1988 eh = path[depth].p_hdr;
1989 if (unlikely(path[depth].p_hdr == NULL)) {
1990 EXT4_ERROR_INODE(inode, "path[%d].p_hdr == NULL", depth);
2027 path + depth);
2052 path + depth);
2068 depth = ext_depth(inode);
2069 eh = path[depth].p_hdr;
2085 eh = npath[depth].p_hdr;
2106 depth = ext_depth(inode);
2107 eh = path[depth].p_hdr;
2110 nearex = path[depth].p_ext;
2112 err = ext4_ext_get_access(handle, inode, path + depth);
2162 path[depth].p_ext = nearex;
2248 int depth = ext_depth(inode);
2252 ex = path[depth].p_ext;
2278 struct ext4_ext_path *path, int depth)
2284 depth--;
2285 path = path + depth;
2311 while (--depth >= 0) {
2337 int depth = ext_depth(inode);
2341 if (le16_to_cpu(path[depth].p_hdr->eh_entries)
2342 < le16_to_cpu(path[depth].p_hdr->eh_max)) {
2373 int depth;
2379 depth = ext_depth(inode);
2382 index = depth * 2;
2384 index = depth * 3;
2573 int depth = ext_depth(inode), credits, revoke_credits;
2585 if (!path[depth].p_hdr)
2586 path[depth].p_hdr = ext_block_hdr(path[depth].p_bh);
2587 eh = path[depth].p_hdr;
2588 if (unlikely(path[depth].p_hdr == NULL)) {
2589 EXT4_ERROR_INODE(inode, "path[%d].p_hdr == NULL", depth);
2593 ex = path[depth].p_ext;
2612 path[depth].p_ext = ex;
2682 err = ext4_ext_get_access(handle, inode, path + depth);
2722 err = ext4_ext_dirty(handle, inode, path + depth);
2761 if (err == 0 && eh->eh_entries == 0 && path[depth].p_bh != NULL)
2762 err = ext4_ext_rm_idx(handle, inode, path, depth);
2793 int depth = ext_depth(inode);
2807 depth + 1,
2808 ext4_free_metadata_revoke_credits(inode->i_sb, depth));
2813 trace_ext4_ext_remove_space(inode, start, end, depth);
2834 depth = ext_depth(inode);
2836 ex = path[depth].p_ext;
2838 if (depth) {
2841 depth);
2905 * after i_size and walking into the tree depth-wise.
2907 depth = ext_depth(inode);
2909 int k = i = depth;
2914 path = kcalloc(depth + 1, sizeof(struct ext4_ext_path),
2920 path[0].p_maxdepth = path[0].p_depth = depth;
2924 if (ext4_ext_check(inode, path[0].p_hdr, depth, 0)) {
2932 if (i == depth) {
2971 depth - i - 1,
2981 if (WARN_ON(i + 1 > depth)) {
3007 trace_ext4_ext_remove_space_done(inode, start, end, depth, &partial,
3096 printk(KERN_ERR "EXT4-fs: extents: %lu min, %lu max, max depth %lu\n",
3164 unsigned int ee_len, depth;
3174 depth = ext_depth(inode);
3175 ex = path[depth].p_ext;
3186 err = ext4_ext_get_access(handle, inode, path + depth);
3218 err = ext4_ext_dirty(handle, inode, path + depth);
3310 unsigned int ee_len, depth;
3316 depth = ext_depth(inode);
3317 ex = path[depth].p_ext;
3344 depth = ext_depth(inode);
3345 ex = path[depth].p_ext;
3404 unsigned int ee_len, depth, map_len = map->m_len;
3418 depth = ext_depth(inode);
3419 eh = path[depth].p_hdr;
3420 ex = path[depth].p_ext;
3474 err = ext4_ext_get_access(handle, inode, path + depth);
3520 err = ext4_ext_get_access(handle, inode, path + depth);
3542 err = ext4_ext_dirty(handle, inode, path + depth);
3545 path[depth].p_ext = abut_ex;
3660 int split_flag = 0, depth;
3673 depth = ext_depth(inode);
3674 ex = path[depth].p_ext;
3700 int depth;
3703 depth = ext_depth(inode);
3704 ex = path[depth].p_ext;
3731 depth = ext_depth(inode);
3732 ex = path[depth].p_ext;
3735 err = ext4_ext_get_access(handle, inode, path + depth);
3763 int depth;
3773 depth = ext_depth(inode);
3774 ex = path[depth].p_ext;
3789 depth = ext_depth(inode);
3790 ex = path[depth].p_ext;
3798 err = ext4_ext_get_access(handle, inode, path + depth);
4130 int err = 0, depth, ret;
4147 depth = ext_depth(inode);
4154 if (unlikely(path[depth].p_ext == NULL && depth != 0)) {
4156 "lblock: %lu, depth: %d pblock %lld",
4157 (unsigned long) map->m_lblk, depth,
4158 path[depth].p_block);
4163 ex = path[depth].p_ext;
4453 int depth = 0;
4473 depth = ext_depth(inode);
4478 * Recalculate credits when extent tree depth changes.
4480 if (depth != ext_depth(inode)) {
4482 depth = ext_depth(inode);
5050 * Shift the extents of a path structure lying between path[depth].p_ext
5051 * and EXT_LAST_EXTENT(path[depth].p_hdr), by @shift blocks. @SHIFT tells
5059 int depth, err = 0;
5063 depth = path->p_depth;
5065 while (depth >= 0) {
5066 if (depth == path->p_depth) {
5067 ex_start = path[depth].p_ext;
5071 ex_last = EXT_LAST_EXTENT(path[depth].p_hdr);
5074 if (ex_start == EXT_FIRST_EXTENT(path[depth].p_hdr)) {
5077 credits = depth + 2;
5089 err = ext4_ext_get_access(handle, inode, path + depth);
5099 EXT_FIRST_EXTENT(path[depth].p_hdr))
5113 err = ext4_ext_dirty(handle, inode, path + depth);
5117 if (--depth < 0 || !update)
5122 err = ext4_ext_get_access(handle, inode, path + depth);
5127 le32_add_cpu(&path[depth].p_idx->ei_block, -shift);
5129 le32_add_cpu(&path[depth].p_idx->ei_block, shift);
5130 err = ext4_ext_dirty(handle, inode, path + depth);
5135 if (path[depth].p_idx != EXT_FIRST_INDEX(path[depth].p_hdr))
5138 depth--;
5158 int ret = 0, depth;
5169 depth = path->p_depth;
5170 extent = path[depth].p_ext;
5186 depth = path->p_depth;
5187 extent = path[depth].p_ext;
5235 depth = path->p_depth;
5236 extent = path[depth].p_ext;
5245 if (extent < EXT_LAST_EXTENT(path[depth].p_hdr)) {
5246 path[depth].p_ext++;
5255 extent = EXT_LAST_EXTENT(path[depth].p_hdr);
5259 extent = EXT_FIRST_EXTENT(path[depth].p_hdr);
5265 extent = EXT_LAST_EXTENT(path[depth].p_hdr);
5269 if (extent == EXT_LAST_EXTENT(path[depth].p_hdr))
5275 path[depth].p_ext = extent;
5443 int ret = 0, depth, split_flag = 0;
5536 depth = ext_depth(inode);
5537 extent = path[depth].p_ext;
5797 int depth, mapped = 0, err = 0;
5818 depth = ext_depth(inode);
5825 if (unlikely(path[depth].p_ext == NULL && depth != 0)) {
5827 "bad extent address - lblock: %lu, depth: %d, pblock: %lld",
5829 depth, path[depth].p_block);
5834 extent = path[depth].p_ext;
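
The dominant idiom in the matches above is indexing the extent path array by the tree depth: path[0] describes the root level kept inside the inode, path[depth] is the leaf level whose p_ext points at an actual extent, and the levels in between hold index entries via p_idx. That is why the walk at source lines 111-116 iterates with i <= depth, and why the kernel sizes the array with kcalloc(depth + 1, ...) (line 619) or depth + 2 when an insertion may deepen the tree (line 914). What follows is a minimal, self-contained sketch of that layout, not kernel code: the struct definitions are pared-down stand-ins for the real ones in fs/ext4/ext4_extents.h, and walk_path() is a hypothetical helper that mirrors the loop at lines 111-116.

#include <stdio.h>

/*
 * Simplified stand-ins for the fs/ext4 structures referenced in the
 * listing. The real definitions carry many more fields (p_hdr, p_bh,
 * p_block, p_maxdepth, ...); only what the walk below needs is kept.
 */
struct ext4_extent     { unsigned int ee_block; };  /* leaf entry  */
struct ext4_extent_idx { unsigned int ei_block; };  /* index entry */

struct ext4_ext_path {
	struct ext4_extent     *p_ext;  /* set at the leaf level (i == depth) */
	struct ext4_extent_idx *p_idx;  /* set at index levels   (i <  depth) */
	int                     p_depth;
};

/*
 * Hypothetical helper mirroring source lines 111-116: a path over a
 * tree of depth d has d + 1 entries, walked root-first down to the
 * leaf, hence the inclusive i <= depth bound.
 */
static void walk_path(struct ext4_ext_path *path)
{
	int depth = path->p_depth, i;

	for (i = 0; i <= depth; i++, path++) {
		if (i == depth)
			printf("level %d (leaf): extent at lblk %u\n",
			       i, path->p_ext ? path->p_ext->ee_block : 0u);
		else
			printf("level %d (index): child covers lblk %u+\n",
			       i, path->p_idx ? path->p_idx->ei_block : 0u);
	}
}

int main(void)
{
	/* A depth-1 tree: one index level above one leaf level. */
	struct ext4_extent_idx idx = { .ei_block = 0 };
	struct ext4_extent     ext = { .ee_block = 0 };
	struct ext4_ext_path   path[2] = {
		{ .p_idx = &idx, .p_depth = 1 },  /* root/index level */
		{ .p_ext = &ext, .p_depth = 1 },  /* leaf level       */
	};

	walk_path(path);
	return 0;
}

Compiled stand-alone, the sketch prints one line per tree level; in the kernel the same shape is what makes expressions like "depth = ext_depth(inode); ex = path[depth].p_ext;" recur throughout the listing.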