
Searched refs:queue_mask (Results 1 - 25 of 39) sorted by relevance


/kernel/linux/linux-5.10/drivers/soc/ti/
knav_qmss_acc.c
    283  cmd->command, cmd->queue_mask, cmd->list_dma,  in knav_acc_write()
    289  writel_relaxed(cmd->queue_mask, &pdsp->acc_command->queue_mask);  in knav_acc_write()
    308  u32 queue_mask;  in knav_acc_setup_cmd() local
    313  queue_mask = BIT(range->num_queues) - 1;  in knav_acc_setup_cmd()
    317  queue_mask = 0;  in knav_acc_setup_cmd()
    322  cmd->queue_mask = queue_mask;  in knav_acc_setup_cmd()
knav_qmss.h
    89   u32 queue_mask;  member
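
The knav_qmss hits above build a contiguous mask of the range's queues with BIT(num_queues) - 1 and store it in the accumulator command. A minimal userspace sketch of that bit pattern, with BIT() defined locally to stand in for the kernel macro (the helper name and the 32-queue guard are additions for illustration, not part of the driver):

#include <stdint.h>
#include <stdio.h>

#define BIT(n) (1u << (n))   /* local stand-in for the kernel BIT() macro */

/* Build a mask with one bit per queue in a contiguous range, mirroring
 * queue_mask = BIT(range->num_queues) - 1 in knav_acc_setup_cmd(). */
static uint32_t contiguous_queue_mask(unsigned int num_queues)
{
    if (num_queues == 0 || num_queues > 32)
        return 0;                   /* nothing to accumulate */
    if (num_queues == 32)
        return 0xffffffffu;         /* avoid the undefined 1u << 32 */
    return BIT(num_queues) - 1;     /* e.g. 4 queues -> 0x0000000f */
}

int main(void)
{
    printf("mask for 4 queues:  0x%08x\n", contiguous_queue_mask(4));
    printf("mask for 12 queues: 0x%08x\n", contiguous_queue_mask(12));
    return 0;
}
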
/kernel/linux/linux-6.6/drivers/soc/ti/
knav_qmss_acc.c
    283  cmd->command, cmd->queue_mask, cmd->list_dma,  in knav_acc_write()
    289  writel_relaxed(cmd->queue_mask, &pdsp->acc_command->queue_mask);  in knav_acc_write()
    308  u32 queue_mask;  in knav_acc_setup_cmd() local
    313  queue_mask = BIT(range->num_queues) - 1;  in knav_acc_setup_cmd()
    317  queue_mask = 0;  in knav_acc_setup_cmd()
    322  cmd->queue_mask = queue_mask;  in knav_acc_setup_cmd()
knav_qmss.h
    89   u32 queue_mask;  member
/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdkfd/
kfd_packet_manager_vi.c
    135  packet->queue_mask_lo = lower_32_bits(res->queue_mask);  in pm_set_resources_vi()
    136  packet->queue_mask_hi = upper_32_bits(res->queue_mask);  in pm_set_resources_vi()
kfd_packet_manager_v9.c
    136  packet->queue_mask_lo = lower_32_bits(res->queue_mask);  in pm_set_resources_v9()
    137  packet->queue_mask_hi = upper_32_bits(res->queue_mask);  in pm_set_resources_v9()
kfd_device_queue_manager.c
    1094  res.queue_mask = 0;  in set_sched_resources()
    1108  * definition of res.queue_mask needs updating  in set_sched_resources()
    1110  if (WARN_ON(i >= (sizeof(res.queue_mask)*8))) {  in set_sched_resources()
    1115  res.queue_mask |= 1ull  in set_sched_resources()
    1125  res.vmid_mask, res.queue_mask);  in set_sched_resources()
kfd_priv.h
    549  uint64_t queue_mask;  member
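
The amdkfd hits split the 64-bit res->queue_mask across two 32-bit packet fields. A minimal sketch of that split, with lower_32_bits()/upper_32_bits() written out as plain shifts; the packet struct below is a simplified stand-in, not the real PM4 set_resources layout:

#include <stdint.h>
#include <stdio.h>

/* Userspace stand-ins for the kernel's lower_32_bits()/upper_32_bits(). */
#define lower_32_bits(v) ((uint32_t)((v) & 0xffffffffu))
#define upper_32_bits(v) ((uint32_t)((v) >> 32))

/* Simplified stand-in for the set_resources packet fields. */
struct set_resources_packet {
    uint32_t queue_mask_lo;
    uint32_t queue_mask_hi;
};

int main(void)
{
    uint64_t queue_mask = 0x0000000300000001ull;   /* example: queues 0, 32, 33 */
    struct set_resources_packet packet;

    packet.queue_mask_lo = lower_32_bits(queue_mask);
    packet.queue_mask_hi = upper_32_bits(queue_mask);

    printf("lo=0x%08x hi=0x%08x\n", packet.queue_mask_lo, packet.queue_mask_hi);
    return 0;
}
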
/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdkfd/
kfd_packet_manager_vi.c
    136  packet->queue_mask_lo = lower_32_bits(res->queue_mask);  in pm_set_resources_vi()
    137  packet->queue_mask_hi = upper_32_bits(res->queue_mask);  in pm_set_resources_vi()
kfd_packet_manager_v9.c
    200  packet->queue_mask_lo = lower_32_bits(res->queue_mask);  in pm_set_resources_v9()
    201  packet->queue_mask_hi = upper_32_bits(res->queue_mask);  in pm_set_resources_v9()
kfd_device_queue_manager.c
    1569  res.queue_mask = 0;  in set_sched_resources()
    1583  * definition of res.queue_mask needs updating  in set_sched_resources()
    1585  if (WARN_ON(i >= (sizeof(res.queue_mask)*8))) {  in set_sched_resources()
    1590  res.queue_mask |= 1ull  in set_sched_resources()
    1600  res.vmid_mask, res.queue_mask);  in set_sched_resources()
/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/
amdgpu_gfx.c
    498  uint64_t queue_mask = 0;  in amdgpu_gfx_enable_kcq() local
    510  * definition of queue_mask needs updating */  in amdgpu_gfx_enable_kcq()
    511  if (WARN_ON(i > (sizeof(queue_mask)*8))) {  in amdgpu_gfx_enable_kcq()
    516  queue_mask |= (1ull << amdgpu_queue_mask_bit_to_set_resource_bit(adev, i));  in amdgpu_gfx_enable_kcq()
    530  kiq->pmf->kiq_set_resources(kiq_ring, queue_mask);  in amdgpu_gfx_enable_kcq()
amdgpu_gfx.h
    77   uint64_t queue_mask);
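
The amdgpu_gfx hits accumulate a 64-bit queue_mask and guard against indices that would not fit in the mask before setting each bit. A minimal sketch of that accumulation loop; the bit-remapping step is replaced here by an identity mapping (the real driver routes it through amdgpu_queue_mask_bit_to_set_resource_bit(), and the helper names below are illustrative only):

#include <stdint.h>
#include <stdio.h>

/* Identity stand-in for amdgpu_queue_mask_bit_to_set_resource_bit(). */
static unsigned int queue_bit_to_resource_bit(unsigned int i)
{
    return i;
}

/* Build a set_resources queue mask from a list of enabled queue indices,
 * skipping any index that would overflow the 64-bit mask
 * (the driver WARNs in that case and continues). */
static uint64_t build_kcq_queue_mask(const unsigned int *queues, unsigned int count)
{
    uint64_t queue_mask = 0;
    unsigned int i;

    for (i = 0; i < count; i++) {
        if (queues[i] >= sizeof(queue_mask) * 8)
            continue;                   /* would not fit in the mask */
        queue_mask |= 1ull << queue_bit_to_resource_bit(queues[i]);
    }
    return queue_mask;
}

int main(void)
{
    unsigned int queues[] = { 0, 1, 8, 9 };

    printf("queue_mask = 0x%016llx\n",
           (unsigned long long)build_kcq_queue_mask(queues, 4));
    return 0;
}
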
/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdgpu/
amdgpu_gfx.c
    583  uint64_t queue_mask = 0;  in amdgpu_gfx_enable_kcq() local
    595  * definition of queue_mask needs updating */  in amdgpu_gfx_enable_kcq()
    596  if (WARN_ON(i > (sizeof(queue_mask)*8))) {  in amdgpu_gfx_enable_kcq()
    601  queue_mask |= (1ull << amdgpu_queue_mask_bit_to_set_resource_bit(adev, i));  in amdgpu_gfx_enable_kcq()
    619  queue_mask = ~0ULL;  in amdgpu_gfx_enable_kcq()
    621  kiq->pmf->kiq_set_resources(kiq_ring, queue_mask);  in amdgpu_gfx_enable_kcq()
amdgpu_gfx.h
    132  uint64_t queue_mask);
/kernel/linux/linux-5.10/drivers/net/ethernet/marvell/
mv643xx_eth.c
    2248  u8 queue_mask;  in mv643xx_eth_poll() local
    2259  queue_mask = mp->work_tx | mp->work_tx_end | mp->work_rx;  in mv643xx_eth_poll()
    2261  queue_mask |= mp->work_rx_refill;  in mv643xx_eth_poll()
    2263  if (!queue_mask) {  in mv643xx_eth_poll()
    2269  queue = fls(queue_mask) - 1;  in mv643xx_eth_poll()
    2270  queue_mask = 1 << queue;  in mv643xx_eth_poll()
    2276  if (mp->work_tx_end & queue_mask) {  in mv643xx_eth_poll()
    2278  } else if (mp->work_tx & queue_mask) {  in mv643xx_eth_poll()
    2281  } else if (mp->work_rx & queue_mask) {  in mv643xx_eth_poll()
    2283  } else if (!mp->oom && (mp->work_rx_refill & queue_mask)) {  in mv643xx_eth_poll()
    [all...]
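
The mv643xx_eth poll loop ORs the per-queue work bits together, uses fls() to pick the highest-numbered queue with pending work, reduces queue_mask to that single bit, and then tests which kind of work it was. A compact userspace sketch of that selection, using an fls() built on __builtin_clz() in place of the kernel helper (the example work bitmaps are made up for illustration, and the driver only folds in the refill work when it is not out of memory):

#include <stdint.h>
#include <stdio.h>

/* Stand-in for the kernel's fls(): 1-based index of the highest set bit, 0 if none. */
static int fls_u32(uint32_t x)
{
    return x ? 32 - __builtin_clz(x) : 0;
}

int main(void)
{
    /* Example pending-work bitmaps, one bit per queue, as in mv643xx_eth_poll(). */
    uint8_t work_tx = 0x02, work_tx_end = 0x00, work_rx = 0x09, work_rx_refill = 0x04;
    uint8_t queue_mask;
    int queue;

    queue_mask = work_tx | work_tx_end | work_rx;
    queue_mask |= work_rx_refill;          /* driver: only when not OOM */

    if (!queue_mask) {
        printf("no work pending\n");
        return 0;
    }

    queue = fls_u32(queue_mask) - 1;       /* highest-numbered queue with work */
    queue_mask = 1 << queue;               /* reduce to a single-queue mask */

    if (work_tx_end & queue_mask)
        printf("queue %d: tx end work\n", queue);
    else if (work_tx & queue_mask)
        printf("queue %d: tx work\n", queue);
    else if (work_rx & queue_mask)
        printf("queue %d: rx work\n", queue);
    else if (work_rx_refill & queue_mask)
        printf("queue %d: rx refill work\n", queue);

    return 0;
}
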
/kernel/linux/linux-5.10/net/ethtool/
ioctl.c
    2441  DECLARE_BITMAP(queue_mask, MAX_NUM_QUEUE);  in ethtool_get_per_queue_coalesce()
    2448  bitmap_from_arr32(queue_mask, per_queue_opt->queue_mask,  in ethtool_get_per_queue_coalesce()
    2451  for_each_set_bit(bit, queue_mask, MAX_NUM_QUEUE) {  in ethtool_get_per_queue_coalesce()
    2474  DECLARE_BITMAP(queue_mask, MAX_NUM_QUEUE);  in ethtool_set_per_queue_coalesce()
    2482  bitmap_from_arr32(queue_mask, per_queue_opt->queue_mask, MAX_NUM_QUEUE);  in ethtool_set_per_queue_coalesce()
    2483  n_queue = bitmap_weight(queue_mask, MAX_NUM_QUEUE);  in ethtool_set_per_queue_coalesce()
    2488  for_each_set_bit(bit, queue_mask, MAX_NUM_QUEUE) {  in ethtool_set_per_queue_coalesce()
    2517  for_each_set_bit(i, queue_mask, bi  in ethtool_set_per_queue_coalesce()
    [all...]
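
In the ethtool ioctl hits, the user-supplied queue_mask arrives as an array of __u32 words, is expanded into a kernel bitmap with bitmap_from_arr32(), and each selected queue is then visited with for_each_set_bit(). A userspace sketch of the same walk over the raw word array, without the kernel bitmap helpers; MAX_NUM_QUEUE is assumed here to be 4096 and the helper/callback names are illustrative:

#include <stdint.h>
#include <stdio.h>

#define MAX_NUM_QUEUE 4096                               /* assumed value */
#define QUEUE_MASK_WORDS ((MAX_NUM_QUEUE + 31) / 32)

/* Visit every queue whose bit is set in the __u32-word mask array, mimicking
 * the bitmap_from_arr32() + for_each_set_bit() pair in the ioctl path. */
static void for_each_masked_queue(const uint32_t *queue_mask,
                                  void (*fn)(unsigned int queue))
{
    unsigned int word, bit;

    for (word = 0; word < QUEUE_MASK_WORDS; word++) {
        if (!queue_mask[word])
            continue;
        for (bit = 0; bit < 32; bit++)
            if (queue_mask[word] & (1u << bit))
                fn(word * 32 + bit);
    }
}

static void print_queue(unsigned int queue)
{
    printf("queue %u selected\n", queue);
}

int main(void)
{
    uint32_t queue_mask[QUEUE_MASK_WORDS] = { 0 };

    queue_mask[0] = 0x5;        /* queues 0 and 2 */
    queue_mask[1] = 0x1;        /* queue 32 */
    for_each_masked_queue(queue_mask, print_queue);
    return 0;
}
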
/kernel/linux/linux-6.6/drivers/net/ethernet/marvell/
mv643xx_eth.c
    2259  u8 queue_mask;  in mv643xx_eth_poll() local
    2270  queue_mask = mp->work_tx | mp->work_tx_end | mp->work_rx;  in mv643xx_eth_poll()
    2272  queue_mask |= mp->work_rx_refill;  in mv643xx_eth_poll()
    2274  if (!queue_mask) {  in mv643xx_eth_poll()
    2280  queue = fls(queue_mask) - 1;  in mv643xx_eth_poll()
    2281  queue_mask = 1 << queue;  in mv643xx_eth_poll()
    2287  if (mp->work_tx_end & queue_mask) {  in mv643xx_eth_poll()
    2289  } else if (mp->work_tx & queue_mask) {  in mv643xx_eth_poll()
    2292  } else if (mp->work_rx & queue_mask) {  in mv643xx_eth_poll()
    2294  } else if (!mp->oom && (mp->work_rx_refill & queue_mask)) {  in mv643xx_eth_poll()
    [all...]
/kernel/linux/linux-6.6/net/ethtool/
ioctl.c
    2527  DECLARE_BITMAP(queue_mask, MAX_NUM_QUEUE);  in ethtool_get_per_queue_coalesce()
    2534  bitmap_from_arr32(queue_mask, per_queue_opt->queue_mask,  in ethtool_get_per_queue_coalesce()
    2537  for_each_set_bit(bit, queue_mask, MAX_NUM_QUEUE) {  in ethtool_get_per_queue_coalesce()
    2560  DECLARE_BITMAP(queue_mask, MAX_NUM_QUEUE);  in ethtool_set_per_queue_coalesce()
    2568  bitmap_from_arr32(queue_mask, per_queue_opt->queue_mask, MAX_NUM_QUEUE);  in ethtool_set_per_queue_coalesce()
    2569  n_queue = bitmap_weight(queue_mask, MAX_NUM_QUEUE);  in ethtool_set_per_queue_coalesce()
    2574  for_each_set_bit(bit, queue_mask, MAX_NUM_QUEUE) {  in ethtool_set_per_queue_coalesce()
    2603  for_each_set_bit(i, queue_mask, bi  in ethtool_set_per_queue_coalesce()
    [all...]
/kernel/linux/linux-5.10/drivers/net/ethernet/cadence/
macb_main.c
    3586  unsigned int *queue_mask,  in macb_probe_queues()
    3589  *queue_mask = 0x1;  in macb_probe_queues()
    3602  *queue_mask |= readl_relaxed(mem + GEM_DCFG6) & 0xff;  in macb_probe_queues()
    3603  *num_queues = hweight32(*queue_mask);  in macb_probe_queues()
    3716  if (!(bp->queue_mask & (1 << hw_q)))  in macb_init()
    4481  unsigned int queue_mask, num_queues;  in macb_probe() local
    4518  macb_probe_queues(mem, native_io, &queue_mask, &num_queues);  in macb_probe()
    4542  bp->queue_mask = queue_mask;  in macb_probe()
    3584  macb_probe_queues(void __iomem *mem, bool native_io, unsigned int *queue_mask, unsigned int *num_queues)  macb_probe_queues() argument
macb.h
    1183  unsigned int queue_mask;  member
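
The macb hits show the probe path deriving queue_mask from the GEM_DCFG6 register (queue 0 is always present, the remaining bits are read from hardware) and counting queues with hweight32(); macb_init() then skips hardware queues whose bit is clear. A userspace sketch of that derivation, with the register read replaced by a plain value and hweight32() by __builtin_popcount() (function and variable names below are illustrative, not the driver's):

#include <stdint.h>
#include <stdio.h>

/* Derive the usable queue mask and count from the low byte of a DCFG6-style
 * register value: queue 0 always exists, the other bits come from hardware. */
static void probe_queues(uint32_t dcfg6, unsigned int *queue_mask,
                         unsigned int *num_queues)
{
    *queue_mask = 0x1;                              /* queue 0 always present */
    *queue_mask |= dcfg6 & 0xff;                    /* hardware-advertised queues */
    *num_queues = __builtin_popcount(*queue_mask);  /* hweight32() stand-in */
}

int main(void)
{
    unsigned int queue_mask, num_queues, hw_q;

    probe_queues(0x0d, &queue_mask, &num_queues);   /* example register value */
    printf("queue_mask=0x%02x num_queues=%u\n", queue_mask, num_queues);

    for (hw_q = 0; hw_q < 8; hw_q++) {
        if (!(queue_mask & (1u << hw_q)))
            continue;           /* as in macb_init(): skip absent queues */
        printf("initialising hw queue %u\n", hw_q);
    }
    return 0;
}
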
/kernel/linux/linux-6.6/drivers/net/ethernet/cadence/
macb_main.c
    3932  unsigned int *queue_mask,  in macb_probe_queues()
    3935  *queue_mask = 0x1;  in macb_probe_queues()
    3948  *queue_mask |= readl_relaxed(mem + GEM_DCFG6) & 0xff;  in macb_probe_queues()
    3949  *num_queues = hweight32(*queue_mask);  in macb_probe_queues()
    4068  if (!(bp->queue_mask & (1 << hw_q)))  in macb_init()
    4953  unsigned int queue_mask, num_queues;  in macb_probe() local
    4989  macb_probe_queues(mem, native_io, &queue_mask, &num_queues);  in macb_probe()
    5013  bp->queue_mask = queue_mask;  in macb_probe()
    3930  macb_probe_queues(void __iomem *mem, bool native_io, unsigned int *queue_mask, unsigned int *num_queues)  macb_probe_queues() argument
/kernel/linux/linux-5.10/net/sched/
sch_taprio.c
    1199  u32 i, queue_mask = 0;  in tc_map_to_queue_mask() local
    1210  queue_mask |= GENMASK(offset + count - 1, offset);  in tc_map_to_queue_mask()
    1213  return queue_mask;  in tc_map_to_queue_mask()
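
tc_map_to_queue_mask() in the taprio hits turns each selected traffic class's queue range (offset, count) into a contiguous run of bits with GENMASK() and ORs the runs together. A small sketch of that mapping with GENMASK() defined locally for 32-bit masks; the tc-to-range table and the tc_mask parameter are illustrative stand-ins for the qdisc's mqprio configuration and gate mask:

#include <stdint.h>
#include <stdio.h>

/* 32-bit stand-in for the kernel GENMASK(): bits h..l inclusive. */
#define GENMASK32(h, l) (((~0u) >> (31 - (h))) & ((~0u) << (l)))

struct tc_queue_range {
    unsigned int offset;   /* first hardware queue of this traffic class */
    unsigned int count;    /* number of queues in this traffic class */
};

static uint32_t tc_map_to_queue_mask(const struct tc_queue_range *ranges,
                                     unsigned int num_tc, uint32_t tc_mask)
{
    uint32_t queue_mask = 0;
    unsigned int i;

    for (i = 0; i < num_tc; i++) {
        if (!(tc_mask & (1u << i)))
            continue;          /* traffic class not selected */
        queue_mask |= GENMASK32(ranges[i].offset + ranges[i].count - 1,
                                ranges[i].offset);
    }
    return queue_mask;
}

int main(void)
{
    struct tc_queue_range ranges[] = { { 0, 2 }, { 2, 2 }, { 4, 4 } };

    /* Select traffic classes 0 and 2 -> queues 0-1 and 4-7 -> 0xf3. */
    printf("queue_mask = 0x%02x\n", tc_map_to_queue_mask(ranges, 3, 0x5));
    return 0;
}
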
/kernel/linux/patches/linux-4.19/prebuilts/usr/include/linux/
ethtool.h
    443  __u32 queue_mask[__KERNEL_DIV_ROUND_UP(MAX_NUM_QUEUE, 32)];  member
/kernel/linux/linux-6.6/net/sched/
sch_taprio.c
    1436  u32 i, queue_mask = 0;  in tc_map_to_queue_mask() local
    1447  queue_mask |= GENMASK(offset + count - 1, offset);  in tc_map_to_queue_mask()
    1450  return queue_mask;  in tc_map_to_queue_mask()

Completed in 56 milliseconds
