Lines Matching refs:r_refcount
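
Context for the hits below: r_refcount is the per-extent reference count field used by ocfs2's refcount tree. Assuming this listing was generated over fs/ocfs2/refcounttree.c in the Linux kernel (the field and record names match that file), the field belongs to struct ocfs2_refcount_rec, defined in fs/ocfs2/ocfs2_fs.h roughly as:

	struct ocfs2_refcount_rec {
		__le64	r_cpos;		/* Physical offset, in clusters,
					   of the extent this record tracks */
		__le32	r_clusters;	/* Clusters covered by this extent */
		__le32	r_refcount;	/* Reference count of this extent;
					   0 marks a hole in the tree */
	};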

903 		ret_rec->r_refcount = 0;
1054 * This fake record has r_refcount = 0.
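
Hits 903 and 1054 show the convention for ranges that no record covers: the lookup path returns a synthesized record whose r_refcount is zero instead of failing. A minimal sketch of that convention (ret_rec, cpos, and len are taken from the surrounding hits; the exact field setup is assumed):

	ret_rec->r_cpos = cpu_to_le64(cpos);
	ret_rec->r_clusters = cpu_to_le32(len);
	ret_rec->r_refcount = 0;	/* zero count == hole */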
1151 if ((rb->rf_records.rl_recs[index].r_refcount ==
1152 rb->rf_records.rl_recs[index + 1].r_refcount) &&
1189 BUG_ON(rb->rf_records.rl_recs[index].r_refcount !=
1190 rb->rf_records.rl_recs[index + 1].r_refcount);
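
The checks at 1151-1152 and the assertion at 1189-1190 encode the merge rule for neighboring leaf records: two adjacent records may be coalesced only when they carry the same r_refcount and their cluster ranges are physically contiguous. A hedged restatement of that condition (the helper name is hypothetical, not a function in the file):

	static int refcount_recs_mergeable(struct ocfs2_refcount_rec *prev,
					   struct ocfs2_refcount_rec *next)
	{
		/* Same count, and prev ends exactly where next begins. */
		return prev->r_refcount == next->r_refcount &&
		       le64_to_cpu(prev->r_cpos) +
		       le32_to_cpu(prev->r_clusters) ==
		       le64_to_cpu(next->r_cpos);
	}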
1253 index, le32_to_cpu(rec->r_refcount), change);
1254 le32_add_cpu(&rec->r_refcount, change);
1256 if (!rec->r_refcount) {
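
Hits 1253-1256 apply a signed delta to the count and then test for zero. le32_add_cpu() is the stock kernel byte-order helper; from include/linux/byteorder/generic.h it is effectively:

	static inline void le32_add_cpu(__le32 *var, u32 val)
	{
		*var = cpu_to_le32(le32_to_cpu(*var) + val);
	}

So a negative change decrements the little-endian on-disk count in place, and a result of zero (hit 1256) turns the record back into a hole.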
1790 le32_to_cpu(rec->r_clusters), le32_to_cpu(rec->r_refcount));
1817 * If split_rec->r_refcount > 0, we are changing the refcount (in case we
1819 * If split_rec->r_refcount == 0, we are punching a hole in current refcount
1844 le32_to_cpu(orig_rec->r_refcount),
1847 le32_to_cpu(split_rec->r_refcount));
1854 if (!split_rec->r_refcount &&
1867 if (split_rec->r_refcount &&
1945 * Note: it can happen that split_rec.r_refcount = 0,
1959 if (split_rec->r_refcount) {
1965 le32_to_cpu(split_rec->r_refcount));
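
Hits 1817-1965 belong to the record-splitting path. Per the comment at 1817-1819, a split_rec carrying a positive r_refcount replaces the count for its subrange, while a zero r_refcount punches a hole in the existing record. A caller-side sketch of the two cases (cpos, len, and new_count are hypothetical variables):

	struct ocfs2_refcount_rec split = {
		.r_cpos     = cpu_to_le64(cpos),
		.r_clusters = cpu_to_le32(len),
	};

	split.r_refcount = cpu_to_le32(new_count);	/* > 0: change the count */
	/* ...or... */
	split.r_refcount = 0;				/* == 0: punch a hole */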
2009 * is the same, cool, we just need to increase the r_refcount
2011 * 2. If we find a hole, just insert it with r_refcount = 1.
2015 if (rec.r_refcount && le64_to_cpu(rec.r_cpos) == cpos &&
2019 le32_to_cpu(rec.r_refcount));
2027 } else if (!rec.r_refcount) {
2028 rec.r_refcount = cpu_to_le32(1);
2046 le32_add_cpu(&rec.r_refcount, 1);
2050 set_len, le32_to_cpu(rec.r_refcount));
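
Hits 2009-2050 implement the two cases named in the comment at 2009-2011 for adding a reference. A condensed sketch of that policy, stitched together from the hits themselves (not the verbatim control flow of the file):

	if (rec.r_refcount && le64_to_cpu(rec.r_cpos) == cpos) {
		/* Case 1: an existing record already covers the range,
		 * so just bump its count. */
		le32_add_cpu(&rec.r_refcount, 1);
	} else if (!rec.r_refcount) {
		/* Case 2: the lookup returned a hole; the first
		 * reference starts the count at 1. */
		rec.r_refcount = cpu_to_le32(1);
	}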
2194 le32_add_cpu(&split.r_refcount, -1);
2247 r_count = le32_to_cpu(rec.r_refcount);
2264 if (le32_to_cpu(rec.r_refcount) == 1 && delete) {
2418 le32_to_cpu(rec.r_refcount), index);
2441 if (rec.r_refcount) {
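
Hits 2194-2441 sit on the decrease path. Read together: the count is lowered with le32_add_cpu(..., -1), and a record holding its final reference is removed outright when the caller passes delete (hit 2264), at which point the clusters it tracked can be released. A hedged outline of that branch (the freeing and record-removal steps are elided):

	if (le32_to_cpu(rec.r_refcount) == 1 && delete) {
		/* Last reference: drop the whole record and free
		 * the clusters it covered. */
	} else {
		le32_add_cpu(&split.r_refcount, -1);	/* just lower it */
	}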
3225 BUG_ON(!rec.r_refcount);
3237 if (le32_to_cpu(rec.r_refcount) == 1) {
3542 BUG_ON(!rec.r_refcount);
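
The final hits (3225-3542) are on the copy-on-write side. The BUG_ONs insist that any extent reaching this path is tracked by a record, and hit 3237 branches on a count of exactly one. A hedged sketch of the decision those hits imply:

	BUG_ON(!rec.r_refcount);	/* CoW target must be tracked */
	if (le32_to_cpu(rec.r_refcount) == 1) {
		/* Sole owner: the extent is not actually shared, so it
		 * can be written in place (no copy needed). */
	} else {
		/* Shared: allocate fresh clusters and copy before writing. */
	}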