/kernel/linux/linux-5.10/drivers/net/ethernet/huawei/hinic/
hinic_hw_wq.c
   34  #define WQ_SIZE(wq)  ((wq)->q_depth * (wq)->wqebb_size)
  497  * @wqebb_size: Work Queue Block Byte Size
  505  u16 wqebb_size, u32 wq_page_size, u16 q_depth,  (in hinic_wq_allocate())
  514  if (!is_power_of_2(wqebb_size)) {  (in hinic_wq_allocate())
  515  dev_err(&pdev->dev, "wqebb_size must be power of 2\n");  (in hinic_wq_allocate())
  529  wqebb_size_shift = ilog2(wqebb_size);  (in hinic_wq_allocate())
  530  num_wqebbs_per_page = ALIGN(wq_page_size, wqebb_size)  (in hinic_wq_allocate())
  546  wq->wqebb_size = wqebb_size;  (in hinic_wq_allocate())
  593  * @wqebb_size
  504  hinic_wq_allocate(struct hinic_wqs *wqs, struct hinic_wq *wq, u16 wqebb_size, u32 wq_page_size, u16 q_depth, u16 max_wqe_size)  (hinic_wq_allocate() argument)
  600  hinic_wqs_cmdq_alloc(struct hinic_cmdq_pages *cmdq_pages, struct hinic_wq *wq, struct hinic_hwif *hwif, int cmdq_blocks, u16 wqebb_size, u32 wq_page_size, u16 q_depth, u16 max_wqe_size)  (hinic_wqs_cmdq_alloc() argument)
  [all...]
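
The hinic_wq_allocate() hits above show the WQEBB geometry checks: the block size must be a power of 2, its ilog2() is cached as a shift, and the page size is rounded up to a whole number of blocks. A minimal user-space sketch of that arithmetic follows; the 64-byte WQEBB, 4 KiB page, and depth of 256 are illustrative assumptions, not values taken from the driver, and __builtin_ctz() stands in for the kernel's ilog2().

/*
 * Self-contained sketch of the sizing logic visible in hinic_wq_allocate().
 * ALIGN() and the power-of-2 test are re-implemented here to stand in for
 * the kernel helpers from <linux/kernel.h> and <linux/log2.h>.
 */
#include <stdio.h>

#define ALIGN(x, a)  (((x) + (a) - 1) & ~((a) - 1))

static int is_power_of_2(unsigned int n)
{
	return n != 0 && (n & (n - 1)) == 0;
}

int main(void)
{
	unsigned int wqebb_size = 64;     /* work queue block size in bytes (assumed) */
	unsigned int wq_page_size = 4096; /* assumed page size */
	unsigned int q_depth = 256;       /* WQEBBs in the queue (assumed) */

	if (!is_power_of_2(wqebb_size)) {
		fprintf(stderr, "wqebb_size must be power of 2\n");
		return 1;
	}

	/* ilog2() of a power of 2 == count of trailing zero bits */
	unsigned int wqebb_size_shift = (unsigned int)__builtin_ctz(wqebb_size);

	/* WQEBBs per page, with the page size rounded up to a whole WQEBB */
	unsigned int num_wqebbs_per_page =
		ALIGN(wq_page_size, wqebb_size) >> wqebb_size_shift;

	/* WQ_SIZE(wq) from line 34: total queue size in bytes */
	unsigned int wq_size = q_depth * wqebb_size;

	printf("shift=%u wqebbs/page=%u wq bytes=%u\n",
	       wqebb_size_shift, num_wqebbs_per_page, wq_size);
	return 0;
}

The power-of-2 requirement is what lets the driver replace divisions by wqebb_size with the cached shift on the fast path.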

hinic_hw_wq.h
   28  u16 wqebb_size;  (member)
   79  int cmdq_blocks, u16 wqebb_size, u32 wq_page_size,
   91  u16 wqebb_size, u32 wq_page_size, u16 q_depth,

hinic_hw_qp.h
   55  ALIGN(HINIC_SQ_WQE_SIZE(1), (wq)->wqebb_size)
   58  (HINIC_MIN_TX_WQE_SIZE((sq)->wq) / (sq)->wq->wqebb_size)

hinic_hw_qp.c
  645  prod_idx += ALIGN(wqe_size, wq->wqebb_size) / wq->wqebb_size;  (in hinic_sq_write_db())
  739  *wqe_size = ALIGN(*wqe_size, sq->wq->wqebb_size);  (in hinic_sq_read_wqebb())
  885  wqe_size = ALIGN(wqe_size, wq->wqebb_size);  (in hinic_rq_read_next_wqe())
  886  num_wqebbs = wqe_size / wq->wqebb_size;  (in hinic_rq_read_next_wqe())

hinic_hw_cmdq.c
  366  num_wqebbs = ALIGN(WQE_LCMD_SIZE, wq->wqebb_size) / wq->wqebb_size;  (in cmdq_sync_cmd_direct_resp())
  445  num_wqebbs = ALIGN(WQE_SCMD_SIZE, wq->wqebb_size) / wq->wqebb_size;  (in cmdq_set_arm_bit())
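
The hinic_hw_qp.c and hinic_hw_cmdq.c hits share one idiom: a WQE's byte size is rounded up to a whole number of WQEBBs via ALIGN(wqe_size, wqebb_size) / wqebb_size. A small sketch of that calculation, with illustrative sizes only:

#include <assert.h>

#define ALIGN(x, a)  (((x) + (a) - 1) & ~((a) - 1))

/* Number of WQEBBs a wqe_size-byte WQE occupies (the idiom above) */
static unsigned int num_wqebbs(unsigned int wqe_size, unsigned int wqebb_size)
{
	return ALIGN(wqe_size, wqebb_size) / wqebb_size;
}

int main(void)
{
	assert(num_wqebbs(64, 64) == 1);   /* exact fit in one block */
	assert(num_wqebbs(100, 64) == 2);  /* partial block rounds up */
	assert(num_wqebbs(128, 64) == 2);
	return 0;
}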

hinic_tx.c
  718  (((hw_ci - sw_ci) & wq->mask) * wq->wqebb_size < wqe_size))  (in free_tx_poll())
  724  if (wqe_size > wq->wqebb_size) {  (in free_tx_poll())
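
The free_tx_poll() hit at line 718 is a wrap-safe ring occupancy check: assuming q_depth is a power of 2 so that wq->mask == q_depth - 1, the expression (hw_ci - sw_ci) & mask counts completed WQEBBs correctly even after the 16-bit consumer indices wrap. A sketch under those assumptions (all concrete values are made up):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	const unsigned int q_depth = 256, wqebb_size = 64;
	const unsigned int mask = q_depth - 1;  /* valid only for power-of-2 depth */

	uint16_t sw_ci = 65530;  /* software consumer index, about to wrap */
	uint16_t hw_ci = 4;      /* hardware consumer index, already wrapped */

	/* 16-bit subtraction wraps modulo 65536, so the masked distance
	 * still yields the 10 WQEBBs completed across the wrap point */
	unsigned int done = ((unsigned int)(uint16_t)(hw_ci - sw_ci)) & mask;

	unsigned int wqe_size = 2 * wqebb_size;  /* a WQE spanning two WQEBBs */
	if (done * wqebb_size < wqe_size)
		printf("WQE not fully completed, stop polling\n");
	else
		printf("WQE complete: %u WQEBBs (%u bytes) done\n",
		       done, done * wqebb_size);
	return 0;
}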

/kernel/linux/linux-6.6/drivers/net/ethernet/huawei/hinic/

hinic_hw_wq.c
   34  #define WQ_SIZE(wq)  ((wq)->q_depth * (wq)->wqebb_size)
  496  * @wqebb_size: Work Queue Block Byte Size
  504  u16 wqebb_size, u32 wq_page_size, u16 q_depth,  (in hinic_wq_allocate())
  513  if (!is_power_of_2(wqebb_size)) {  (in hinic_wq_allocate())
  514  dev_err(&pdev->dev, "wqebb_size must be power of 2\n");  (in hinic_wq_allocate())
  528  wqebb_size_shift = ilog2(wqebb_size);  (in hinic_wq_allocate())
  529  num_wqebbs_per_page = ALIGN(wq_page_size, wqebb_size)  (in hinic_wq_allocate())
  545  wq->wqebb_size = wqebb_size;  (in hinic_wq_allocate())
  592  * @wqebb_size
  503  hinic_wq_allocate(struct hinic_wqs *wqs, struct hinic_wq *wq, u16 wqebb_size, u32 wq_page_size, u16 q_depth, u16 max_wqe_size)  (hinic_wq_allocate() argument)
  599  hinic_wqs_cmdq_alloc(struct hinic_cmdq_pages *cmdq_pages, struct hinic_wq *wq, struct hinic_hwif *hwif, int cmdq_blocks, u16 wqebb_size, u32 wq_page_size, u16 q_depth, u16 max_wqe_size)  (hinic_wqs_cmdq_alloc() argument)
  [all...]

hinic_hw_wq.h
   28  u16 wqebb_size;  (member)
   79  int cmdq_blocks, u16 wqebb_size, u32 wq_page_size,
   91  u16 wqebb_size, u32 wq_page_size, u16 q_depth,

hinic_hw_qp.h
   55  ALIGN(HINIC_SQ_WQE_SIZE(1), (wq)->wqebb_size)
   58  (HINIC_MIN_TX_WQE_SIZE((sq)->wq) / (sq)->wq->wqebb_size)

hinic_hw_qp.c
  641  prod_idx += ALIGN(wqe_size, wq->wqebb_size) / wq->wqebb_size;  (in hinic_sq_write_db())
  735  *wqe_size = ALIGN(*wqe_size, sq->wq->wqebb_size);  (in hinic_sq_read_wqebb())
  881  wqe_size = ALIGN(wqe_size, wq->wqebb_size);  (in hinic_rq_read_next_wqe())
  882  num_wqebbs = wqe_size / wq->wqebb_size;  (in hinic_rq_read_next_wqe())

hinic_hw_cmdq.c
  361  num_wqebbs = ALIGN(WQE_LCMD_SIZE, wq->wqebb_size) / wq->wqebb_size;  (in cmdq_sync_cmd_direct_resp())
  440  num_wqebbs = ALIGN(WQE_SCMD_SIZE, wq->wqebb_size) / wq->wqebb_size;  (in cmdq_set_arm_bit())

hinic_tx.c
  720  (((hw_ci - sw_ci) & wq->mask) * wq->wqebb_size < wqe_size))  (in free_tx_poll())
  726  if (wqe_size > wq->wqebb_size) {  (in free_tx_poll())