/kernel/linux/linux-5.10/drivers/net/ethernet/huawei/hinic/

hinic_hw_wq.c
    701  int num_wqebbs, u16 idx)  in copy_wqe_to_shadow()
    706  for (i = 0; i < num_wqebbs; i++, idx++) {  in copy_wqe_to_shadow()
    718  int num_wqebbs, u16 idx)  in copy_wqe_from_shadow()
    723  for (i = 0; i < num_wqebbs; i++, idx++) {  in copy_wqe_from_shadow()
    744  int curr_pg, end_pg, num_wqebbs;  in hinic_get_wqe() local
    749  num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) >> wq->wqebb_size_shift;  in hinic_get_wqe()
    751  if (atomic_sub_return(num_wqebbs, &wq->delta) <= 0) {  in hinic_get_wqe()
    752  atomic_add(num_wqebbs, &wq->delta);  in hinic_get_wqe()
    756  end_prod_idx = atomic_add_return(num_wqebbs, &wq->prod_idx);  in hinic_get_wqe()
    759  curr_prod_idx = end_prod_idx - num_wqebbs;  in hinic_get_wqe()
    700  copy_wqe_to_shadow(struct hinic_wq *wq, void *shadow_addr, int num_wqebbs, u16 idx)  copy_wqe_to_shadow() argument
    717  copy_wqe_from_shadow(struct hinic_wq *wq, void *shadow_addr, int num_wqebbs, u16 idx)  copy_wqe_from_shadow() argument
    792  int num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) / wq->wqebb_size;  hinic_return_wqe() local
    806  int num_wqebbs = ALIGN(wqe_size, wq->wqebb_size)  hinic_put_wqe() local
    825  int num_wqebbs = ALIGN(wqe_size, wq->wqebb_size)  hinic_read_wqe() local
    892  int curr_pg, num_wqebbs;  hinic_write_wqe() local
    [all...]

hinic_hw_cmdq.c
    347  int errcode, wrapped, num_wqebbs;  in cmdq_sync_cmd_direct_resp() local
    366  num_wqebbs = ALIGN(WQE_LCMD_SIZE, wq->wqebb_size) / wq->wqebb_size;  in cmdq_sync_cmd_direct_resp()
    367  next_prod_idx = curr_prod_idx + num_wqebbs;  in cmdq_sync_cmd_direct_resp()
    429  int wrapped, num_wqebbs;  in cmdq_set_arm_bit() local
    445  num_wqebbs = ALIGN(WQE_SCMD_SIZE, wq->wqebb_size) / wq->wqebb_size;  in cmdq_set_arm_bit()
    446  next_prod_idx = curr_prod_idx + num_wqebbs;  in cmdq_set_arm_bit()

hinic_hw_qp.c
    883  unsigned int num_wqebbs;  in hinic_rq_read_next_wqe() local
    886  num_wqebbs = wqe_size / wq->wqebb_size;  in hinic_rq_read_next_wqe()
    888  *cons_idx = RQ_MASKED_IDX(rq, *cons_idx + num_wqebbs);  in hinic_rq_read_next_wqe()
/kernel/linux/linux-6.6/drivers/net/ethernet/huawei/hinic/

hinic_hw_wq.c
    700  int num_wqebbs, u16 idx)  in copy_wqe_to_shadow()
    705  for (i = 0; i < num_wqebbs; i++, idx++) {  in copy_wqe_to_shadow()
    717  int num_wqebbs, u16 idx)  in copy_wqe_from_shadow()
    722  for (i = 0; i < num_wqebbs; i++, idx++) {  in copy_wqe_from_shadow()
    743  int curr_pg, end_pg, num_wqebbs;  in hinic_get_wqe() local
    748  num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) >> wq->wqebb_size_shift;  in hinic_get_wqe()
    750  if (atomic_sub_return(num_wqebbs, &wq->delta) <= 0) {  in hinic_get_wqe()
    751  atomic_add(num_wqebbs, &wq->delta);  in hinic_get_wqe()
    755  end_prod_idx = atomic_add_return(num_wqebbs, &wq->prod_idx);  in hinic_get_wqe()
    758  curr_prod_idx = end_prod_idx - num_wqebbs;  in hinic_get_wqe()
    699  copy_wqe_to_shadow(struct hinic_wq *wq, void *shadow_addr, int num_wqebbs, u16 idx)  copy_wqe_to_shadow() argument
    716  copy_wqe_from_shadow(struct hinic_wq *wq, void *shadow_addr, int num_wqebbs, u16 idx)  copy_wqe_from_shadow() argument
    791  int num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) / wq->wqebb_size;  hinic_return_wqe() local
    805  int num_wqebbs = ALIGN(wqe_size, wq->wqebb_size)  hinic_put_wqe() local
    824  int num_wqebbs = ALIGN(wqe_size, wq->wqebb_size)  hinic_read_wqe() local
    891  int curr_pg, num_wqebbs;  hinic_write_wqe() local
    [all...]

hinic_hw_cmdq.c
    342  int errcode, wrapped, num_wqebbs;  in cmdq_sync_cmd_direct_resp() local
    361  num_wqebbs = ALIGN(WQE_LCMD_SIZE, wq->wqebb_size) / wq->wqebb_size;  in cmdq_sync_cmd_direct_resp()
    362  next_prod_idx = curr_prod_idx + num_wqebbs;  in cmdq_sync_cmd_direct_resp()
    424  int wrapped, num_wqebbs;  in cmdq_set_arm_bit() local
    440  num_wqebbs = ALIGN(WQE_SCMD_SIZE, wq->wqebb_size) / wq->wqebb_size;  in cmdq_set_arm_bit()
    441  next_prod_idx = curr_prod_idx + num_wqebbs;  in cmdq_set_arm_bit()

hinic_hw_qp.c
    879  unsigned int num_wqebbs;  in hinic_rq_read_next_wqe() local
    882  num_wqebbs = wqe_size / wq->wqebb_size;  in hinic_rq_read_next_wqe()
    884  *cons_idx = RQ_MASKED_IDX(rq, *cons_idx + num_wqebbs);  in hinic_rq_read_next_wqe()
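The hinic hits above all revolve around one sizing pattern: a WQE of wqe_size bytes is rounded up to whole WQE basic blocks (ALIGN() followed by a divide or shift by wq->wqebb_size), free room is claimed from wq->delta, and the producer index advances by that count. Below is a minimal standalone sketch of that arithmetic, assuming nothing beyond the listed lines; demo_wq, demo_reserve_wqe and DEMO_ALIGN are invented names, not driver code.

#include <stdio.h>
#include <stdbool.h>

/* Same rounding as the kernel's ALIGN(): round x up to a multiple of a (a is a power of two). */
#define DEMO_ALIGN(x, a)   (((x) + (a) - 1) & ~((a) - 1))

struct demo_wq {
	unsigned int wqebb_size;        /* size of one WQE basic block, power of two */
	unsigned int wqebb_size_shift;  /* log2(wqebb_size) */
	int delta;                      /* free WQEBBs remaining in the ring */
	unsigned int prod_idx;          /* producer index, counted in WQEBBs */
};

/* Reserve room for a WQE of wqe_size bytes; false means the ring is full. */
static bool demo_reserve_wqe(struct demo_wq *wq, unsigned int wqe_size,
			     unsigned int *curr_prod_idx)
{
	/* Round the WQE up to whole WQEBBs, then convert bytes to a WQEBB count. */
	int num_wqebbs = DEMO_ALIGN(wqe_size, wq->wqebb_size) >> wq->wqebb_size_shift;

	/* Single-threaded stand-in for the atomic_sub_return()/atomic_add() pair:
	 * claim the space, and give it back if the ring cannot hold the WQE. */
	wq->delta -= num_wqebbs;
	if (wq->delta <= 0) {
		wq->delta += num_wqebbs;
		return false;
	}

	/* Stand-in for atomic_add_return() on prod_idx: the new WQE starts
	 * num_wqebbs slots before the advanced index. */
	wq->prod_idx += num_wqebbs;
	*curr_prod_idx = wq->prod_idx - num_wqebbs;
	return true;
}

int main(void)
{
	struct demo_wq wq = { .wqebb_size = 64, .wqebb_size_shift = 6,
			      .delta = 256, .prod_idx = 0 };
	unsigned int pi;

	if (demo_reserve_wqe(&wq, 104, &pi))    /* 104 bytes round up to 2 WQEBBs of 64 bytes */
		printf("reserved 2 WQEBBs starting at prod_idx %u\n", pi);
	return 0;
}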
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/en_accel/

ktls_tx.c
    139  u16 pi, u8 num_wqebbs, u32 num_bytes,  in tx_fill_wi()
    145  .num_wqebbs = num_wqebbs,  in tx_fill_wi()
    167  u16 pi, num_wqebbs;  in post_static_params() local
    169  num_wqebbs = MLX5E_TLS_SET_STATIC_PARAMS_WQEBBS;  in post_static_params()
    170  pi = mlx5e_txqsq_get_next_pi(sq, num_wqebbs);  in post_static_params()
    175  tx_fill_wi(sq, pi, num_wqebbs, 0, NULL);  in post_static_params()
    176  sq->pc += num_wqebbs;  in post_static_params()
    185  u16 pi, num_wqebbs;  in post_progress_params() local
    187  num_wqebbs  in post_progress_params()
    138  tx_fill_wi(struct mlx5e_txqsq *sq, u16 pi, u8 num_wqebbs, u32 num_bytes, struct page *page)  tx_fill_wi() argument
    [all...]

ktls_rx.c
    140  u16 pi, num_wqebbs, room;  in post_static_params() local
    142  num_wqebbs = MLX5E_TLS_SET_STATIC_PARAMS_WQEBBS;  in post_static_params()
    143  room = mlx5e_stop_room_for_wqe(num_wqebbs);  in post_static_params()
    147  pi = mlx5e_icosq_get_next_pi(sq, num_wqebbs);  in post_static_params()
    155  .num_wqebbs = num_wqebbs,  in post_static_params()
    159  sq->pc += num_wqebbs;  in post_static_params()
    171  u16 pi, num_wqebbs, room;  in post_progress_params() local
    173  num_wqebbs = MLX5E_TLS_SET_PROGRESS_PARAMS_WQEBBS;  in post_progress_params()
    174  room = mlx5e_stop_room_for_wqe(num_wqebbs);  in post_progress_params()
    [all...]
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx5/core/en_accel/

ktls_rx.c
    141  u16 pi, num_wqebbs;  in post_static_params() local
    143  num_wqebbs = MLX5E_TLS_SET_STATIC_PARAMS_WQEBBS;  in post_static_params()
    144  if (unlikely(!mlx5e_icosq_can_post_wqe(sq, num_wqebbs)))  in post_static_params()
    147  pi = mlx5e_icosq_get_next_pi(sq, num_wqebbs);  in post_static_params()
    156  .num_wqebbs = num_wqebbs,  in post_static_params()
    160  sq->pc += num_wqebbs;  in post_static_params()
    172  u16 pi, num_wqebbs;  in post_progress_params() local
    174  num_wqebbs = MLX5E_TLS_SET_PROGRESS_PARAMS_WQEBBS;  in post_progress_params()
    175  if (unlikely(!mlx5e_icosq_can_post_wqe(sq, num_wqebbs)))  in post_progress_params()
    [all...]

ktls_tx.c
    526  u16 pi, u8 num_wqebbs, u32 num_bytes,  in tx_fill_wi()
    532  .num_wqebbs = num_wqebbs,  in tx_fill_wi()
    554  u16 pi, num_wqebbs;  in post_static_params() local
    556  num_wqebbs = MLX5E_TLS_SET_STATIC_PARAMS_WQEBBS;  in post_static_params()
    557  pi = mlx5e_txqsq_get_next_pi(sq, num_wqebbs);  in post_static_params()
    563  tx_fill_wi(sq, pi, num_wqebbs, 0, NULL);  in post_static_params()
    564  sq->pc += num_wqebbs;  in post_static_params()
    573  u16 pi, num_wqebbs;  in post_progress_params() local
    575  num_wqebbs  in post_progress_params()
    525  tx_fill_wi(struct mlx5e_txqsq *sq, u16 pi, u8 num_wqebbs, u32 num_bytes, struct page *page)  tx_fill_wi() argument
    [all...]
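Both ktls files repeat the same posting pattern: pick a fixed WQEBB count for the control WQE (MLX5E_TLS_SET_STATIC_PARAMS_WQEBBS or MLX5E_TLS_SET_PROGRESS_PARAMS_WQEBBS), make sure the queue still has room, take the next producer slot, record num_wqebbs in the per-WQE info so the completion path can credit the space back, and advance sq->pc by that count. The sketch below only illustrates that bookkeeping; demo_sq, demo_wqe_info and demo_post_fixed_wqe are made-up names, and the room check is a simplification of what mlx5e_icosq_can_post_wqe() and mlx5e_stop_room_for_wqe() provide.

#include <stdio.h>
#include <stdbool.h>

#define DEMO_SQ_SIZE              64   /* ring size in WQEBBs, power of two */
#define DEMO_STATIC_PARAMS_WQEBBS 2    /* demo value; stands in for the TLS static-params WQEBB count */

struct demo_wqe_info {
	unsigned char num_wqebbs;          /* WQEBBs consumed by the WQE posted at this slot */
};

struct demo_sq {
	unsigned int pc;                   /* producer counter, in WQEBBs */
	unsigned int cc;                   /* consumer counter, in WQEBBs */
	struct demo_wqe_info db[DEMO_SQ_SIZE];
};

/* Simplified room check: can the ring absorb n more WQEBBs right now? */
static bool demo_can_post(const struct demo_sq *sq, unsigned int n)
{
	return sq->pc - sq->cc + n <= DEMO_SQ_SIZE;
}

static bool demo_post_fixed_wqe(struct demo_sq *sq)
{
	unsigned int num_wqebbs = DEMO_STATIC_PARAMS_WQEBBS;
	unsigned int pi;

	if (!demo_can_post(sq, num_wqebbs))
		return false;

	pi = sq->pc & (DEMO_SQ_SIZE - 1);   /* next producer slot, masked into the ring */
	sq->db[pi].num_wqebbs = num_wqebbs; /* remembered so completion can retire the whole WQE */
	sq->pc += num_wqebbs;               /* the producer counter moves in WQEBBs, not WQEs */
	return true;
}

int main(void)
{
	struct demo_sq sq = { 0 };

	if (demo_post_fixed_wqe(&sq))
		printf("pc=%u after posting one %u-WQEBB control WQE\n", sq.pc, DEMO_STATIC_PARAMS_WQEBBS);
	return 0;
}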
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx5/core/en/

xdp.c
    319  .num_wqebbs = 1,  in mlx5e_xdpsq_get_next_pi()
    367  wi->num_wqebbs = DIV_ROUND_UP(ds_count, MLX5_SEND_WQEBB_NUM_DS);  in mlx5e_xdp_mpwqe_complete()
    370  sq->pc += wi->num_wqebbs;  in mlx5e_xdp_mpwqe_complete()
    497  u8 num_wqebbs = 1;  in mlx5e_xmit_xdp_frame() local
    528  num_wqebbs = DIV_ROUND_UP(ds_cnt, MLX5_SEND_WQEBB_NUM_DS);  in mlx5e_xmit_xdp_frame()
    532  stop_room = MLX5E_STOP_ROOM(num_wqebbs);  in mlx5e_xmit_xdp_frame()
    540  pi = mlx5e_xdpsq_get_next_pi(sq, num_wqebbs);  in mlx5e_xmit_xdp_frame()
    593  .num_wqebbs = num_wqebbs,  in mlx5e_xmit_xdp_frame()
    597  sq->pc += num_wqebbs;  in mlx5e_xmit_xdp_frame()
    [all...]

txrx.h
    160  u8 num_wqebbs;  member
    184  .num_wqebbs = 1,  in mlx5e_txqsq_get_next_pi()
    209  u8 num_wqebbs;  member
    247  .num_wqebbs = 1,  in mlx5e_icosq_get_next_pi()

xdp.h
    190  u8 num_wqebbs;  member
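In the xdp.c and en_tx.c hits the WQEBB count is derived from the number of descriptor segments making up the send WQE: num_wqebbs = DIV_ROUND_UP(ds_cnt, MLX5_SEND_WQEBB_NUM_DS). The short sketch below shows just that rounding; the value 4 (16-byte segments per 64-byte basic block) is an assumption made for the demo, not taken from the headers listed here.

#include <stdio.h>

/* Same idea as the kernel's DIV_ROUND_UP(): integer division rounded up. */
#define DEMO_DIV_ROUND_UP(n, d)   (((n) + (d) - 1) / (d))
#define DEMO_WQEBB_NUM_DS         4   /* assumed: data segments that fit in one WQE basic block */

int main(void)
{
	/* e.g. control + eth + 3 data pointers = 5 segments, which needs 2 basic blocks */
	unsigned int ds_cnt = 5;
	unsigned int num_wqebbs = DEMO_DIV_ROUND_UP(ds_cnt, DEMO_WQEBB_NUM_DS);

	printf("%u segments -> %u WQEBBs\n", ds_cnt, num_wqebbs);
	return 0;
}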
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/

en_tx.c
    274  u8 num_wqebbs;  member
    360  .num_wqebbs = DIV_ROUND_UP(ds_cnt, MLX5_SEND_WQEBB_NUM_DS),  in mlx5e_sq_calc_wqe_attr()
    392  .num_wqebbs = wqe_attr->num_wqebbs,  in mlx5e_txwqe_complete()
    401  sq->pc += wi->num_wqebbs;  in mlx5e_txwqe_complete()
    544  .num_wqebbs = DIV_ROUND_UP(ds_count, MLX5_SEND_WQEBB_NUM_DS),  in mlx5e_tx_mpwqe_session_complete()
    549  sq->pc += wi->num_wqebbs;  in mlx5e_tx_mpwqe_session_complete()
    662  pi = mlx5e_txqsq_get_next_pi(sq, wqe_attr.num_wqebbs);  in mlx5e_xmit()
    685  pi = mlx5e_txqsq_get_next_pi(sq, wqe_attr.num_wqebbs);  in mlx5e_sq_xmit_simple()
    779  sqcc += wi->num_wqebbs;  in mlx5e_poll_tx_cq()
    [all...]

en_txrx.c
    83  .num_wqebbs = 1,  in mlx5e_trigger_irq()

en_rx.c
    545  .num_wqebbs = MLX5E_UMR_WQEBBS,  in mlx5e_alloc_rx_mpwqe()
    620  sqcc += wi->num_wqebbs;  in mlx5e_free_icosq_descs()
    671  sqcc += wi->num_wqebbs;  in mlx5e_poll_ico_cq()
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx5/core/

en_tx.c
    239  u8 num_wqebbs;  member
    332  .num_wqebbs = DIV_ROUND_UP(ds_cnt, MLX5_SEND_WQEBB_NUM_DS),  in mlx5e_sq_calc_wqe_attr()
    363  .num_wqebbs = 1,  in mlx5e_tx_flush()
    384  .num_wqebbs = wqe_attr->num_wqebbs,  in mlx5e_txwqe_complete()
    393  sq->pc += wi->num_wqebbs;  in mlx5e_txwqe_complete()
    584  .num_wqebbs = DIV_ROUND_UP(ds_count, MLX5_SEND_WQEBB_NUM_DS),  in mlx5e_tx_mpwqe_session_complete()
    589  sq->pc += wi->num_wqebbs;  in mlx5e_tx_mpwqe_session_complete()
    721  pi = mlx5e_txqsq_get_next_pi(sq, wqe_attr.num_wqebbs);  in mlx5e_xmit()
    836  sqcc += wi->num_wqebbs;  in mlx5e_poll_tx_cq()
    [all...]

en_txrx.c
    81  .num_wqebbs = 1,  in mlx5e_trigger_irq()

en_rx.c
    696  .num_wqebbs = wqe_bbs,  in mlx5e_build_shampo_hd_umr()
    818  .num_wqebbs = rq->mpwqe.umr_wqebbs,  in mlx5e_alloc_rx_mpwqe()
    959  sqcc += wi->num_wqebbs;  in mlx5e_free_icosq_descs()
    1032  sqcc += wi->num_wqebbs;  in mlx5e_poll_ico_cq()
    1040  mlx5_wq_cyc_wqe_dump(&sq->wq, ci, wi->num_wqebbs);  in mlx5e_poll_ico_cq()
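On the completion side (mlx5e_poll_tx_cq(), mlx5e_poll_ico_cq(), mlx5e_free_icosq_descs() above), the consumer counter is advanced by each completed WQE's recorded num_wqebbs rather than by one, so multi-WQEBB WQEs are retired as a unit. The loop below is an invented, self-contained illustration of that walk, not driver code.

#include <stdio.h>

#define DEMO_SQ_SIZE 8   /* ring size in WQEBBs, power of two */

struct demo_wqe_info {
	unsigned char num_wqebbs;   /* recorded when the WQE was posted */
};

int main(void)
{
	/* Pretend three WQEs were posted, taking 2 + 1 + 3 WQEBBs. */
	struct demo_wqe_info db[DEMO_SQ_SIZE] = {
		[0] = { .num_wqebbs = 2 },
		[2] = { .num_wqebbs = 1 },
		[3] = { .num_wqebbs = 3 },
	};
	unsigned int sqcc = 0, pc = 6;

	while (sqcc != pc) {
		unsigned int ci = sqcc & (DEMO_SQ_SIZE - 1);   /* masked consumer index */

		printf("retire WQE at ci=%u (%u WQEBBs)\n", ci, db[ci].num_wqebbs);
		sqcc += db[ci].num_wqebbs;                     /* skip past the whole WQE */
	}
	return 0;
}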
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/en/

xdp.c
    182  .num_wqebbs = 1,  in mlx5e_xdpsq_get_next_pi()
    230  wi->num_wqebbs = DIV_ROUND_UP(ds_count, MLX5_SEND_WQEBB_NUM_DS);  in mlx5e_xdp_mpwqe_complete()
    233  sq->pc += wi->num_wqebbs;  in mlx5e_xdp_mpwqe_complete()
    435  sqcc += wi->num_wqebbs;  in mlx5e_poll_xdpsq_cq()
    446  mlx5_wq_cyc_wqe_dump(&sq->wq, ci, wi->num_wqebbs);  in mlx5e_poll_xdpsq_cq()
    475  sq->cc += wi->num_wqebbs;  in mlx5e_free_xdpsq_descs()

txrx.h
    124  u8 num_wqebbs;  member
    148  .num_wqebbs = 1,  in mlx5e_txqsq_get_next_pi()
    162  u8 num_wqebbs;  member
    199  .num_wqebbs = 1,  in mlx5e_icosq_get_next_pi()

xdp.h
    135  u8 num_wqebbs;  member
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/en/xsk/

tx.c
    59  wi->num_wqebbs = 1;  in mlx5e_xsk_tx_post_err()
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx5/core/en/xsk/

tx.c
    53  wi->num_wqebbs = 1;  in mlx5e_xsk_tx_post_err()

rx.c
    141  .num_wqebbs = rq->mpwqe.umr_wqebbs,  in mlx5e_xsk_alloc_rx_mpwqe()