Searched refs:new_chunk_sectors (Results 1 - 14 of 14) sorted by relevance

/kernel/linux/linux-5.10/drivers/md/
dm-raid.c
225 int new_chunk_sectors; member
265 l->new_chunk_sectors = mddev->new_chunk_sectors; in rs_config_backup()
274 mddev->new_chunk_sectors = l->new_chunk_sectors; in rs_config_restore()
717 mddev->new_chunk_sectors = mddev->chunk_sectors; in rs_set_cur()
730 mddev->chunk_sectors = mddev->new_chunk_sectors; in rs_set_new()
776 * rs->md.new_chunk_sectors in raid_set_alloc()
1160 rs->md.new_chunk_sectors = rs->md.chunk_sectors = value; in parse_raid_params()
1540 uint32_t min_stripes = max(mddev->chunk_sectors, mddev->new_chunk_sectors) / in rs_set_raid456_stripe_cache()
[all...]
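
The dm-raid.c hits above trace a save/restore pair: rs_config_backup() stashes the pending chunk size in a side structure before dm-raid rewrites the MD configuration, and rs_config_restore() copies it back if the new layout has to be abandoned. A minimal userspace sketch of that pattern, with hypothetical mddev_like and rs_layout_like structs standing in for the kernel's struct mddev and dm-raid's backup struct:

    #include <stdio.h>

    struct mddev_like {              /* hypothetical stand-in for struct mddev */
        int chunk_sectors;           /* chunk size currently in use */
        int new_chunk_sectors;       /* target chunk size while a reshape is pending */
    };

    struct rs_layout_like {          /* hypothetical stand-in for dm-raid's backup */
        int new_chunk_sectors;
    };

    /* Mirrors the rs_config_backup() hit: stash the live value aside. */
    static void config_backup(struct rs_layout_like *l, const struct mddev_like *mddev)
    {
        l->new_chunk_sectors = mddev->new_chunk_sectors;
    }

    /* Mirrors the rs_config_restore() hit: put the saved value back. */
    static void config_restore(struct mddev_like *mddev, const struct rs_layout_like *l)
    {
        mddev->new_chunk_sectors = l->new_chunk_sectors;
    }

    int main(void)
    {
        struct mddev_like mddev = { .chunk_sectors = 128, .new_chunk_sectors = 128 };
        struct rs_layout_like saved;

        config_backup(&saved, &mddev);
        mddev.new_chunk_sectors = 256;   /* trial reconfiguration */
        config_restore(&mddev, &saved);
        printf("restored new_chunk_sectors = %d\n", mddev.new_chunk_sectors);
        return 0;
    }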

raid0.c
670 mddev->new_chunk_sectors = mddev->chunk_sectors; in raid0_takeover_raid45()
712 mddev->new_chunk_sectors = mddev->chunk_sectors; in raid0_takeover_raid10()
755 mddev->new_chunk_sectors = chunksect; in raid0_takeover_raid1()

raid5.c
7185 if (!mddev->new_chunk_sectors || in setup_conf()
7186 (mddev->new_chunk_sectors << 9) % PAGE_SIZE || in setup_conf()
7187 !is_power_of_2(mddev->new_chunk_sectors)) { in setup_conf()
7189 mdname(mddev), mddev->new_chunk_sectors << 9); in setup_conf()
7297 conf->chunk_sectors = mddev->new_chunk_sectors; in setup_conf()
7354 ((mddev->new_chunk_sectors << 9) / RAID5_STRIPE_SIZE(conf)) * 4); in setup_conf()
7521 chunk_sectors = max(mddev->chunk_sectors, mddev->new_chunk_sectors); in raid5_run()
7543 abs(min_offset_diff) >= mddev->new_chunk_sectors) in raid5_run()
7565 BUG_ON(mddev->chunk_sectors != mddev->new_chunk_sectors); in raid5_run()
8091 ((mddev->new_chunk_sectors << in check_stripe_cache()
[all...]
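
The setup_conf() hits (7185-7187) spell out raid5's sanity check on the pending chunk size: it must be non-zero, its byte size (sectors << 9) must be a multiple of PAGE_SIZE, and the sector count must be a power of two. A standalone sketch of that check, assuming 512-byte sectors and a 4 KiB page:

    #include <stdbool.h>
    #include <stdio.h>

    #define SECTOR_SHIFT 9        /* 512-byte sectors */
    #define PAGE_SIZE    4096u    /* assumption: 4 KiB pages */

    static bool is_power_of_2(unsigned int n)
    {
        return n != 0 && (n & (n - 1)) == 0;
    }

    /* Mirrors the three-part test from the setup_conf() hits. */
    static bool chunk_is_valid(unsigned int new_chunk_sectors)
    {
        return new_chunk_sectors &&
               (new_chunk_sectors << SECTOR_SHIFT) % PAGE_SIZE == 0 &&
               is_power_of_2(new_chunk_sectors);
    }

    int main(void)
    {
        printf("128 sectors (64 KiB): %s\n", chunk_is_valid(128) ? "ok" : "invalid");
        printf("12 sectors (6 KiB):   %s\n", chunk_is_valid(12) ? "ok" : "invalid");
        return 0;
    }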

md.h
342 int new_chunk_sectors; member

md.c
1349 mddev->new_chunk_sectors = sb->new_chunk >> 9; in super_90_validate()
1357 mddev->new_chunk_sectors = mddev->chunk_sectors; in super_90_validate()
1493 sb->new_chunk = mddev->new_chunk_sectors << 9; in super_90_sync()
1888 mddev->new_chunk_sectors = le32_to_cpu(sb->new_chunk); in super_1_validate()
1899 mddev->new_chunk_sectors = mddev->chunk_sectors; in super_1_validate()
2101 sb->new_chunk = cpu_to_le32(mddev->new_chunk_sectors); in super_1_sync()
4063 mddev->new_chunk_sectors = mddev->chunk_sectors; in level_store()
4086 mddev->chunk_sectors = mddev->new_chunk_sectors; in level_store()
4288 mddev->chunk_sectors != mddev->new_chunk_sectors) in chunk_size_show()
4290 mddev->new_chunk_sectors << in chunk_size_show()
[all...]
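
Read side by side, the md.c hits also document the two on-disk encodings: the v0.90 superblock keeps new_chunk in bytes (hence the >> 9 on load and << 9 on save), while the v1.x superblock keeps it in sectors as a little-endian u32. A sketch of both round-trips; the le32 helpers are trivial stand-ins that assume a little-endian host:

    #include <stdint.h>
    #include <stdio.h>

    /* Assumption: little-endian host, so the le32 conversions are identities. */
    static uint32_t le32_to_cpu(uint32_t v) { return v; }
    static uint32_t cpu_to_le32(uint32_t v) { return v; }

    int main(void)
    {
        int new_chunk_sectors = 1024;                 /* 512 KiB pending chunk */

        /* v0.90: new_chunk is stored in bytes (super_90_sync/validate hits). */
        uint32_t sb90_new_chunk = new_chunk_sectors << 9;
        int loaded_90 = sb90_new_chunk >> 9;

        /* v1.x: new_chunk is stored in sectors (super_1_sync/validate hits). */
        uint32_t sb1_new_chunk = cpu_to_le32(new_chunk_sectors);
        int loaded_1 = (int)le32_to_cpu(sb1_new_chunk);

        printf("v0.90 stores %u bytes, loads %d sectors\n",
               (unsigned)sb90_new_chunk, loaded_90);
        printf("v1.x  stores %u sectors, loads %d sectors\n",
               (unsigned)sb1_new_chunk, loaded_1);
        return 0;
    }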

raid1.c
3238 if (mddev->chunk_sectors != mddev->new_chunk_sectors || in raid1_reshape()
3241 mddev->new_chunk_sectors = mddev->chunk_sectors; in raid1_reshape()
3341 mddev->new_chunk_sectors = 0; in raid1_takeover()

raid10.c
3589 chunk = mddev->new_chunk_sectors; in setup_geo()
3596 chunk = mddev->new_chunk_sectors; in setup_geo()
4017 mddev->new_chunk_sectors = mddev->chunk_sectors; in raid10_takeover_raid0()
4063 * - chunk size (to ->new_chunk_sectors) in raid10_check_reshape()
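
Two hits in this tree (raid5_run() at 7521 and rs_set_raid456_stripe_cache() at 1540) share a defensive idiom: while a reshape is in flight both the old and the new chunk size are live, so sizing decisions take max(chunk_sectors, new_chunk_sectors). A minimal sketch of that idea; the STRIPE_SECTORS divisor is an assumed stand-in, since the real divisor is truncated out of the hit above:

    #include <stdio.h>

    #define STRIPE_SECTORS 8u   /* assumption: 4 KiB stripe unit, in sectors */

    static unsigned int max_u(unsigned int a, unsigned int b)
    {
        return a > b ? a : b;
    }

    /* Size the stripe cache for whichever chunk size is larger during a
     * reshape, echoing the min_stripes computation in the dm-raid.c hit. */
    static unsigned int min_stripes(unsigned int chunk_sectors,
                                    unsigned int new_chunk_sectors)
    {
        return max_u(chunk_sectors, new_chunk_sectors) / STRIPE_SECTORS;
    }

    int main(void)
    {
        /* Old 64 KiB chunk (128 sectors) reshaping to 256 KiB (512 sectors). */
        printf("min stripes during reshape: %u\n", min_stripes(128, 512));
        return 0;
    }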
/kernel/linux/linux-6.6/drivers/md/
dm-raid.c
226 int new_chunk_sectors; member
266 l->new_chunk_sectors = mddev->new_chunk_sectors; in rs_config_backup()
275 mddev->new_chunk_sectors = l->new_chunk_sectors; in rs_config_restore()
717 mddev->new_chunk_sectors = mddev->chunk_sectors; in rs_set_cur()
730 mddev->chunk_sectors = mddev->new_chunk_sectors; in rs_set_new()
776 * rs->md.new_chunk_sectors in raid_set_alloc()
1160 rs->md.new_chunk_sectors = rs->md.chunk_sectors = value; in parse_raid_params()
1540 uint32_t min_stripes = max(mddev->chunk_sectors, mddev->new_chunk_sectors) / in rs_set_raid456_stripe_cache()
[all...]

raid0.c
665 mddev->new_chunk_sectors = mddev->chunk_sectors; in raid0_takeover_raid45()
707 mddev->new_chunk_sectors = mddev->chunk_sectors; in raid0_takeover_raid10()
750 mddev->new_chunk_sectors = chunksect; in raid0_takeover_raid1()

raid5.c
7523 if (!mddev->new_chunk_sectors || in setup_conf()
7524 (mddev->new_chunk_sectors << 9) % PAGE_SIZE || in setup_conf()
7525 !is_power_of_2(mddev->new_chunk_sectors)) { in setup_conf()
7527 mdname(mddev), mddev->new_chunk_sectors << 9); in setup_conf()
7638 conf->chunk_sectors = mddev->new_chunk_sectors; in setup_conf()
7696 ((mddev->new_chunk_sectors << 9) / RAID5_STRIPE_SIZE(conf)) * 4); in setup_conf()
7864 chunk_sectors = max(mddev->chunk_sectors, mddev->new_chunk_sectors); in raid5_run()
7886 abs(min_offset_diff) >= mddev->new_chunk_sectors) in raid5_run()
7908 BUG_ON(mddev->chunk_sectors != mddev->new_chunk_sectors); in raid5_run()
8445 ((mddev->new_chunk_sectors << in check_stripe_cache()
[all...]

md.h
368 int new_chunk_sectors; member

md.c
1308 mddev->new_chunk_sectors = sb->new_chunk >> 9; in super_90_validate()
1316 mddev->new_chunk_sectors = mddev->chunk_sectors; in super_90_validate()
1452 sb->new_chunk = mddev->new_chunk_sectors << 9; in super_90_sync()
1844 mddev->new_chunk_sectors = le32_to_cpu(sb->new_chunk); in super_1_validate()
1855 mddev->new_chunk_sectors = mddev->chunk_sectors; in super_1_validate()
2057 sb->new_chunk = cpu_to_le32(mddev->new_chunk_sectors); in super_1_sync()
4007 mddev->new_chunk_sectors = mddev->chunk_sectors; in level_store()
4030 mddev->chunk_sectors = mddev->new_chunk_sectors; in level_store()
4232 mddev->chunk_sectors != mddev->new_chunk_sectors) in chunk_size_show()
4234 mddev->new_chunk_sectors << in chunk_size_show()
[all...]

raid1.c
3268 if (mddev->chunk_sectors != mddev->new_chunk_sectors || in raid1_reshape()
3271 mddev->new_chunk_sectors = mddev->chunk_sectors; in raid1_reshape()
3371 mddev->new_chunk_sectors = 0; in raid1_takeover()

raid10.c
3992 chunk = mddev->new_chunk_sectors; in setup_geo()
3999 chunk = mddev->new_chunk_sectors; in setup_geo()
4406 mddev->new_chunk_sectors = mddev->chunk_sectors; in raid10_takeover_raid0()
4451 * - chunk size (to ->new_chunk_sectors) in raid10_check_reshape()
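
Across both kernel trees the field's life cycle is the same: a requested chunk size lands in new_chunk_sectors, stays distinct from chunk_sectors while the reshape runs, and is committed by copying new over current (the rs_set_new() and level_store() hits); paths that cannot reshape, such as raid1, copy current over new or zero the field instead (the rs_set_cur(), raid1_reshape() and raid1_takeover() hits). A sketch of that commit/cancel pair, reusing the hypothetical mddev_like struct from the first example:

    #include <stdio.h>

    struct mddev_like {            /* hypothetical stand-in for struct mddev */
        int chunk_sectors;         /* chunk size in use */
        int new_chunk_sectors;     /* requested chunk size for a reshape */
    };

    /* Commit: the reshape finished, adopt the new geometry (rs_set_new() hit). */
    static void commit_reshape(struct mddev_like *mddev)
    {
        mddev->chunk_sectors = mddev->new_chunk_sectors;
    }

    /* Cancel: no reshape possible or wanted; fall back to the current
     * geometry (rs_set_cur() and raid1_reshape() hits). */
    static void cancel_reshape(struct mddev_like *mddev)
    {
        mddev->new_chunk_sectors = mddev->chunk_sectors;
    }

    int main(void)
    {
        struct mddev_like mddev = { .chunk_sectors = 128, .new_chunk_sectors = 512 };

        commit_reshape(&mddev);
        printf("after commit: chunk=%d new=%d\n",
               mddev.chunk_sectors, mddev.new_chunk_sectors);

        mddev.new_chunk_sectors = 1024;
        cancel_reshape(&mddev);
        printf("after cancel: chunk=%d new=%d\n",
               mddev.chunk_sectors, mddev.new_chunk_sectors);
        return 0;
    }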
