Lines matching refs: locked_ref
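
(The numbers on each match below are the source file's own line numbers. The listing names neither the file nor the enclosing functions, so the attribution here is inferred from the identifiers: these matches appear to come from the btrfs delayed-ref code, most likely fs/btrfs/extent-tree.c, with the first group, source lines 1885-1956, from btrfs_run_delayed_refs_for_head(), which drains one locked ref head, and the second group, 1971-2035, from __btrfs_run_delayed_refs(), the outer loop that obtains and releases heads.)
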
1885 struct btrfs_delayed_ref_head *locked_ref,
1897 lockdep_assert_held(&locked_ref->mutex);
1898 lockdep_assert_held(&locked_ref->lock);
1900 while ((ref = select_delayed_ref(locked_ref))) {
1903 spin_unlock(&locked_ref->lock);
1904 unselect_delayed_ref_head(delayed_refs, locked_ref);
1910 rb_erase_cached(&ref->ref_node, &locked_ref->ref_tree);
1921 locked_ref->ref_mod -= ref->ref_mod;
1924 locked_ref->ref_mod += ref->ref_mod;
1935 must_insert_reserved = locked_ref->must_insert_reserved;
1936 locked_ref->must_insert_reserved = 0;
1938 extent_op = locked_ref->extent_op;
1939 locked_ref->extent_op = NULL;
1940 spin_unlock(&locked_ref->lock);
1947 unselect_delayed_ref_head(delayed_refs, locked_ref);
1955 spin_lock(&locked_ref->lock);
1956 btrfs_merge_delayed_refs(trans, delayed_refs, locked_ref);
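
Read in order, matches 1897-1956 trace the per-head loop and its locking discipline: the head's mutex marks ownership while its spinlock guards the ref rbtree; each selected ref is unlinked and folded into the head's ref_mod under the spinlock (adds subtract, drops add, per lines 1921/1924); the head's one-shot must_insert_reserved and extent_op state is consumed; the spinlock is dropped so the ref can actually be run, which may block; and a failure re-queues the head via unselect_delayed_ref_head(). Below is a condensed sketch of how those lines fit together. It is not the verbatim kernel function: the sketch's name is made up, helpers such as run_one_delayed_ref() and btrfs_check_delayed_seq() are assumed from the same source file, and reference counting plus several cleanup paths are elided.

/*
 * Condensed sketch of the per-head loop implied by the matches above.
 * Not compilable outside the kernel tree; details trimmed for clarity.
 */
static int run_refs_for_head_sketch(struct btrfs_trans_handle *trans,
				    struct btrfs_delayed_ref_head *locked_ref)
{
	struct btrfs_fs_info *fs_info = trans->fs_info;
	struct btrfs_delayed_ref_root *delayed_refs =
		&trans->transaction->delayed_refs;
	struct btrfs_delayed_ref_node *ref;
	struct btrfs_delayed_extent_op *extent_op;
	int must_insert_reserved;
	int ret;

	/* Caller owns the head (mutex) and its ref rbtree (spinlock). */
	lockdep_assert_held(&locked_ref->mutex);
	lockdep_assert_held(&locked_ref->lock);

	while ((ref = select_delayed_ref(locked_ref))) {
		/* A ref gated by a still-pending sequence number stalls the
		 * whole head: drop the lock and re-queue it (1903/1904). */
		if (ref->seq && btrfs_check_delayed_seq(fs_info, ref->seq)) {
			spin_unlock(&locked_ref->lock);
			unselect_delayed_ref_head(delayed_refs, locked_ref);
			return -EAGAIN;
		}

		/* Unlink the ref and fold its count into the head's running
		 * total: adds subtract from ref_mod, drops add (1921/1924). */
		rb_erase_cached(&ref->ref_node, &locked_ref->ref_tree);
		if (ref->action == BTRFS_DROP_DELAYED_REF)
			locked_ref->ref_mod += ref->ref_mod;
		else
			locked_ref->ref_mod -= ref->ref_mod;

		/* Consume the head's one-shot state while still under the
		 * spinlock, then drop it: running the ref may block
		 * (1935-1940). */
		must_insert_reserved = locked_ref->must_insert_reserved;
		locked_ref->must_insert_reserved = 0;
		extent_op = locked_ref->extent_op;
		locked_ref->extent_op = NULL;
		spin_unlock(&locked_ref->lock);

		ret = run_one_delayed_ref(trans, ref, extent_op,
					  must_insert_reserved);
		if (ret) {
			/* Re-queue the head so it can be retried (1947). */
			unselect_delayed_ref_head(delayed_refs, locked_ref);
			return ret;
		}

		/* Retake the lock and merge refs queued meanwhile
		 * (1955/1956). */
		spin_lock(&locked_ref->lock);
		btrfs_merge_delayed_refs(trans, delayed_refs, locked_ref);
	}
	return 0;
}
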
1971 struct btrfs_delayed_ref_head *locked_ref = NULL;
1979 if (!locked_ref) {
1980 locked_ref = btrfs_obtain_ref_head(trans);
1981 if (IS_ERR_OR_NULL(locked_ref)) {
1982 if (PTR_ERR(locked_ref) == -EAGAIN) {
2002 spin_lock(&locked_ref->lock);
2003 btrfs_merge_delayed_refs(trans, delayed_refs, locked_ref);
2005 ret = btrfs_run_delayed_refs_for_head(trans, locked_ref,
2018 ret = cleanup_ref_head(trans, locked_ref);
2033 locked_ref = NULL;
2035 } while ((nr != -1 && count < nr) || locked_ref);
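
Matches 1971-2035 trace the outer loop that feeds the per-head loop: btrfs_obtain_ref_head() hands back the next locked head (NULL when the queue is empty, ERR_PTR(-EAGAIN) when the caller lost a race and should try another head), duplicate refs are squashed under the head's spinlock before the walk (2002/2003), and cleanup_ref_head() retires a fully drained head (2018). Note the loop condition on line 2035: the trailing `|| locked_ref` keeps iterating past the `nr` quota as long as a head is still held, so a head is never abandoned half-processed. A condensed sketch under the same caveats as above; the explicit locked_ref reset in the -EAGAIN branch follows the apparent intent of lines 1981-1982 but is not itself visible in the listing, and the counter bookkeeping is a guess:

/*
 * Condensed sketch of the main loop implied by matches 1971-2035.
 * Not the verbatim kernel function; error handling is trimmed.
 */
static int run_delayed_refs_sketch(struct btrfs_trans_handle *trans,
				   unsigned long nr)
{
	struct btrfs_delayed_ref_root *delayed_refs =
		&trans->transaction->delayed_refs;
	struct btrfs_delayed_ref_head *locked_ref = NULL;
	unsigned long count = 0;
	unsigned long actual_count = 0;	/* refs actually run */
	int ret;

	do {
		if (!locked_ref) {
			/* Next head, already locked for us (1980-1983). */
			locked_ref = btrfs_obtain_ref_head(trans);
			if (IS_ERR_OR_NULL(locked_ref)) {
				/* -EAGAIN: lost the race for this head,
				 * try another one; NULL: queue empty. */
				if (PTR_ERR(locked_ref) == -EAGAIN) {
					locked_ref = NULL;
					continue;
				}
				break;
			}
			count++;
		}

		/* Squash duplicate/cancelling refs before walking the
		 * rbtree (2002/2003). */
		spin_lock(&locked_ref->lock);
		btrfs_merge_delayed_refs(trans, delayed_refs, locked_ref);

		ret = btrfs_run_delayed_refs_for_head(trans, locked_ref,
						      &actual_count);
		if (ret < 0 && ret != -EAGAIN)
			return ret;	/* head was already unselected */

		if (!ret) {
			/* Head fully drained: retire it (2018). */
			ret = cleanup_ref_head(trans, locked_ref);
			if (ret > 0) {
				/* Lock was dropped and the head requeued;
				 * go around again. */
				ret = 0;
				continue;
			} else if (ret) {
				return ret;
			}
		}

		/* Success or -EAGAIN: either way, select another head
		 * (2033). */
		locked_ref = NULL;
		count++;
	} while ((nr != -1 && count < nr) || locked_ref);

	return 0;
}
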