/kernel/linux/linux-6.6/drivers/block/null_blk/
main.c:
  755   static void put_tag(struct nullb_queue *nq, unsigned int tag)   in put_tag() argument
  757   clear_bit_unlock(tag, nq->tag_map);   in put_tag()
  759   if (waitqueue_active(&nq->wait))   in put_tag()
  760   wake_up(&nq->wait);   in put_tag()
  763   static unsigned int get_tag(struct nullb_queue *nq)   in get_tag() argument
  768   tag = find_first_zero_bit(nq->tag_map, nq->queue_depth);   in get_tag()
  769   if (tag >= nq->queue_depth)   in get_tag()
  771   } while (test_and_set_bit_lock(tag, nq->tag_map));   in get_tag()
  778   put_tag(cmd->nq, cm   in free_cmd()
  783   __alloc_cmd(struct nullb_queue *nq)   __alloc_cmd() argument
  805   alloc_cmd(struct nullb_queue *nq, struct bio *bio)   alloc_cmd() argument
  1541  struct nullb_queue *nq = nullb_to_queue(nullb);   null_submit_bio() local
  1643  struct nullb_queue *nq = hctx->driver_data;   null_poll() local
  1678  struct nullb_queue *nq = hctx->driver_data;   null_timeout_rq() local
  1710  struct nullb_queue *nq = hctx->driver_data;   null_queue_rq() local
  1753  cleanup_queue(struct nullb_queue *nq)   cleanup_queue() argument
  1771  struct nullb_queue *nq = hctx->driver_data;   null_exit_hctx() local
  1777  null_init_queue(struct nullb *nullb, struct nullb_queue *nq)   null_init_queue() argument
  1790  struct nullb_queue *nq;   null_init_hctx() local
  1877  setup_commands(struct nullb_queue *nq)   setup_commands() argument
  1918  struct nullb_queue *nq;   init_driver_queues() local
  [all...]
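
The null_blk hits above are the driver's legacy bitmap tag allocator: get_tag() scans for a clear bit with find_first_zero_bit(), claims it with test_and_set_bit_lock(), and put_tag() releases it with clear_bit_unlock() followed by a wake_up() on nq->wait. A minimal user-space sketch of the same claim/release pattern, using C11 atomics on a single 64-bit word instead of the kernel bitmap and waitqueue APIs (QUEUE_DEPTH and the function names are illustrative, not the driver's):

    #include <stdatomic.h>
    #include <stdint.h>

    #define QUEUE_DEPTH 64

    /* One bit per tag; a set bit means the tag is in use. */
    static _Atomic uint64_t tag_map;

    /* Try to claim a free tag; returns the tag index or -1 if all are busy. */
    static int get_tag(void)
    {
        uint64_t map = atomic_load(&tag_map);

        for (int tag = 0; tag < QUEUE_DEPTH; tag++) {
            if (map & ((uint64_t)1 << tag))
                continue;                     /* bit already set: tag busy */
            /* test_and_set_bit_lock() analog: set the bit with acquire order */
            uint64_t old = atomic_fetch_or_explicit(&tag_map, (uint64_t)1 << tag,
                                                    memory_order_acquire);
            if (!(old & ((uint64_t)1 << tag)))
                return tag;                   /* we are the ones who set it */
            map = old;                        /* lost the race, keep scanning */
        }
        return -1;  /* the driver would sleep on nq->wait here */
    }

    /* clear_bit_unlock() analog: release the tag with release ordering. */
    static void put_tag(int tag)
    {
        atomic_fetch_and_explicit(&tag_map, ~((uint64_t)1 << tag),
                                  memory_order_release);
        /* in the driver this is followed by a wake_up() on nq->wait */
    }

The acquire/release ordering mirrors what test_and_set_bit_lock()/clear_bit_unlock() provide, so work done while holding a tag is visible to the tag's next owner.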
/kernel/linux/linux-6.6/drivers/infiniband/hw/bnxt_re/ |
qplib_fp.c:
  159  struct bnxt_qplib_nq *nq = nq_work->nq;   in bnxt_qpn_cqn_sched_task() local
  161  if (cq && nq) {   in bnxt_qpn_cqn_sched_task()
  163  if (atomic_read(&cq->arm_state) && nq->cqn_handler) {   in bnxt_qpn_cqn_sched_task()
  164  dev_dbg(&nq->pdev->dev,   in bnxt_qpn_cqn_sched_task()
  165  "%s:Trigger cq = %p event nq = %p\n",   in bnxt_qpn_cqn_sched_task()
  166  __func__, cq, nq);   in bnxt_qpn_cqn_sched_task()
  167  nq->cqn_handler(nq, cq);   in bnxt_qpn_cqn_sched_task()
  235  static void clean_nq(struct bnxt_qplib_nq *nq, struc   argument
  301  struct bnxt_qplib_nq *nq = from_tasklet(nq, t, nq_tasklet);   bnxt_qplib_service_nq() local
  393  bnxt_re_synchronize_nq(struct bnxt_qplib_nq *nq)   bnxt_re_synchronize_nq() argument
  404  struct bnxt_qplib_nq *nq = dev_instance;   bnxt_qplib_nq_irq() local
  418  bnxt_qplib_nq_stop_irq(struct bnxt_qplib_nq *nq, bool kill)   bnxt_qplib_nq_stop_irq() argument
  438  bnxt_qplib_disable_nq(struct bnxt_qplib_nq *nq)   bnxt_qplib_disable_nq() argument
  458  bnxt_qplib_nq_start_irq(struct bnxt_qplib_nq *nq, int nq_indx, int msix_vector, bool need_init)   bnxt_qplib_nq_start_irq() argument
  499  bnxt_qplib_map_nq_db(struct bnxt_qplib_nq *nq, u32 reg_offt)   bnxt_qplib_map_nq_db() argument
  533  bnxt_qplib_enable_nq(struct pci_dev *pdev, struct bnxt_qplib_nq *nq, int nq_idx, int msix_vector, int bar_reg_offset, cqn_handler_t cqn_handler, srqn_handler_t srqn_handler)   bnxt_qplib_enable_nq() argument
  566  bnxt_qplib_free_nq(struct bnxt_qplib_nq *nq)   bnxt_qplib_free_nq() argument
  574  bnxt_qplib_alloc_nq(struct bnxt_qplib_res *res, struct bnxt_qplib_nq *nq)   bnxt_qplib_alloc_nq() argument
  [all...]
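
These qplib_fp.c hits are the notification-queue (NQ) plumbing: the NQ is allocated, mapped to a doorbell, wired to an MSI-X vector, and completion events are fanned out through the cqn_handler/srqn_handler callbacks registered in bnxt_qplib_enable_nq(). A stripped-down sketch of that register-then-dispatch shape, with deliberately simplified stand-in structs (not the driver's real layouts):

    #include <stdio.h>

    struct nq;                     /* forward declaration */
    struct cq { int armed; };      /* toy stand-in for a completion queue */

    /* Mirrors the shape of cqn_handler_t in qplib_fp.h. */
    typedef int (*cqn_handler_t)(struct nq *nq, struct cq *cq);

    struct nq {
        cqn_handler_t cqn_handler; /* set once when the NQ is enabled */
    };

    /* Roughly what bnxt_qpn_cqn_sched_task() does: if the CQ is armed and a
     * handler is registered, hand the CQ to the consumer. */
    static void nq_sched_task(struct nq *nq, struct cq *cq)
    {
        if (cq && nq && cq->armed && nq->cqn_handler)
            nq->cqn_handler(nq, cq);
    }

    static int my_cqn_handler(struct nq *nq, struct cq *cq)
    {
        printf("completion event on cq %p via nq %p\n", (void *)cq, (void *)nq);
        return 0;
    }

    int main(void)
    {
        struct nq nq = { .cqn_handler = my_cqn_handler };
        struct cq cq = { .armed = 1 };

        nq_sched_task(&nq, &cq);   /* dispatches to my_cqn_handler */
        return 0;
    }

In the driver the dispatch runs from tasklet/work context, and the registered handler is bnxt_re_cqn_handler() from main.c below.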
qplib_fp.h:
  405  struct bnxt_qplib_nq *nq;   member
  467  typedef int (*cqn_handler_t)(struct bnxt_qplib_nq *nq,
  469  typedef int (*srqn_handler_t)(struct bnxt_qplib_nq *nq,
  492  struct bnxt_qplib_nq *nq;   member
  496  void bnxt_qplib_nq_stop_irq(struct bnxt_qplib_nq *nq, bool kill);
  497  void bnxt_qplib_disable_nq(struct bnxt_qplib_nq *nq);
  498  int bnxt_qplib_nq_start_irq(struct bnxt_qplib_nq *nq, int nq_indx,
  500  int bnxt_qplib_enable_nq(struct pci_dev *pdev, struct bnxt_qplib_nq *nq,
  545  void bnxt_qplib_free_nq(struct bnxt_qplib_nq *nq);
  546  int bnxt_qplib_alloc_nq(struct bnxt_qplib_res *res, struct bnxt_qplib_nq *nq);
  [all...]
main.c:
  300   struct bnxt_qplib_nq *nq;   in bnxt_re_stop_irq() local
  304   nq = &rdev->nq[indx - 1];   in bnxt_re_stop_irq()
  305   bnxt_qplib_nq_stop_irq(nq, false);   in bnxt_re_stop_irq()
  316   struct bnxt_qplib_nq *nq;   in bnxt_re_start_irq() local
  342   nq = &rdev->nq[indx - 1];   in bnxt_re_start_irq()
  343   rc = bnxt_qplib_nq_start_irq(nq, indx - 1,   in bnxt_re_start_irq()
  1034  static int bnxt_re_srqn_handler(struct bnxt_qplib_nq *nq,   in bnxt_re_srqn_handler() argument
  1056  static int bnxt_re_cqn_handler(struct bnxt_qplib_nq *nq,   in bnxt_re_cqn_handler() argument
  1171  struct bnxt_qplib_nq *nq;   bnxt_re_alloc_res() local
  [all...]
ib_verbs.c:
  924   scq_nq = qplib_qp->scq->nq;   in bnxt_re_destroy_qp()
  925   rcq_nq = qplib_qp->rcq->nq;   in bnxt_re_destroy_qp()
  1668  struct bnxt_qplib_nq *nq = NULL;   in bnxt_re_destroy_srq() local
  1671  nq = qplib_srq->cq->nq;   in bnxt_re_destroy_srq()
  1675  if (nq)   in bnxt_re_destroy_srq()
  1676  nq->budget--;   in bnxt_re_destroy_srq()
  1717  struct bnxt_qplib_nq *nq = NULL;   in bnxt_re_create_srq() local
  1758  srq->qplib_srq.eventq_hw_ring_id = rdev->nq[0].ring_id;   in bnxt_re_create_srq()
  1759  nq   in bnxt_re_create_srq()
  2903  struct bnxt_qplib_nq *nq;   bnxt_re_destroy_cq() local
  2927  struct bnxt_qplib_nq *nq = NULL;   bnxt_re_create_cq() local
  [all...]
/kernel/linux/linux-5.10/drivers/block/null_blk/ |
main.c:
  614   static void put_tag(struct nullb_queue *nq, unsigned int tag)   in put_tag() argument
  616   clear_bit_unlock(tag, nq->tag_map);   in put_tag()
  618   if (waitqueue_active(&nq->wait))   in put_tag()
  619   wake_up(&nq->wait);   in put_tag()
  622   static unsigned int get_tag(struct nullb_queue *nq)   in get_tag() argument
  627   tag = find_first_zero_bit(nq->tag_map, nq->queue_depth);   in get_tag()
  628   if (tag >= nq->queue_depth)   in get_tag()
  630   } while (test_and_set_bit_lock(tag, nq->tag_map));   in get_tag()
  637   put_tag(cmd->nq, cm   in free_cmd()
  642   __alloc_cmd(struct nullb_queue *nq)   __alloc_cmd() argument
  664   alloc_cmd(struct nullb_queue *nq, int can_wait)   alloc_cmd() argument
  1424  struct nullb_queue *nq = nullb_to_queue(nullb);   null_submit_bio() local
  1475  struct nullb_queue *nq = hctx->driver_data;   null_queue_rq() local
  1512  cleanup_queue(struct nullb_queue *nq)   cleanup_queue() argument
  1530  struct nullb_queue *nq = hctx->driver_data;   null_exit_hctx() local
  1536  null_init_queue(struct nullb *nullb, struct nullb_queue *nq)   null_init_queue() argument
  1547  struct nullb_queue *nq;   null_init_hctx() local
  1631  setup_commands(struct nullb_queue *nq)   setup_commands() argument
  1669  struct nullb_queue *nq;   init_driver_queues() local
  [all...]
zoned.c:
  336  struct nullb_device *dev = cmd->nq->dev;   in null_zone_write()
  539  struct nullb_device *dev = cmd->nq->dev;   in null_zone_mgmt()
  592  struct nullb_device *dev = cmd->nq->dev;   in null_process_zoned_cmd()

null_blk.h:
  21  struct nullb_queue *nq;   member
/kernel/linux/linux-5.10/drivers/infiniband/hw/bnxt_re/ |
qplib_fp.c:
  158  struct bnxt_qplib_nq *nq = nq_work->nq;   in bnxt_qpn_cqn_sched_task() local
  160  if (cq && nq) {   in bnxt_qpn_cqn_sched_task()
  162  if (atomic_read(&cq->arm_state) && nq->cqn_handler) {   in bnxt_qpn_cqn_sched_task()
  163  dev_dbg(&nq->pdev->dev,   in bnxt_qpn_cqn_sched_task()
  164  "%s:Trigger cq = %p event nq = %p\n",   in bnxt_qpn_cqn_sched_task()
  165  __func__, cq, nq);   in bnxt_qpn_cqn_sched_task()
  166  nq->cqn_handler(nq, cq);   in bnxt_qpn_cqn_sched_task()
  234  static void clean_nq(struct bnxt_qplib_nq *nq, struc   argument
  300  struct bnxt_qplib_nq *nq = from_tasklet(nq, t, nq_tasklet);   bnxt_qplib_service_nq() local
  391  struct bnxt_qplib_nq *nq = dev_instance;   bnxt_qplib_nq_irq() local
  405  bnxt_qplib_nq_stop_irq(struct bnxt_qplib_nq *nq, bool kill)   bnxt_qplib_nq_stop_irq() argument
  425  bnxt_qplib_disable_nq(struct bnxt_qplib_nq *nq)   bnxt_qplib_disable_nq() argument
  445  bnxt_qplib_nq_start_irq(struct bnxt_qplib_nq *nq, int nq_indx, int msix_vector, bool need_init)   bnxt_qplib_nq_start_irq() argument
  486  bnxt_qplib_map_nq_db(struct bnxt_qplib_nq *nq, u32 reg_offt)   bnxt_qplib_map_nq_db() argument
  523  bnxt_qplib_enable_nq(struct pci_dev *pdev, struct bnxt_qplib_nq *nq, int nq_idx, int msix_vector, int bar_reg_offset, cqn_handler_t cqn_handler, srqn_handler_t srqn_handler)   bnxt_qplib_enable_nq() argument
  556  bnxt_qplib_free_nq(struct bnxt_qplib_nq *nq)   bnxt_qplib_free_nq() argument
  564  bnxt_qplib_alloc_nq(struct bnxt_qplib_res *res, struct bnxt_qplib_nq *nq)   bnxt_qplib_alloc_nq() argument
  [all...]
qplib_fp.h:
  402  struct bnxt_qplib_nq *nq;   member
  464  typedef int (*cqn_handler_t)(struct bnxt_qplib_nq *nq,
  466  typedef int (*srqn_handler_t)(struct bnxt_qplib_nq *nq,
  489  struct bnxt_qplib_nq *nq;   member
  493  void bnxt_qplib_nq_stop_irq(struct bnxt_qplib_nq *nq, bool kill);
  494  void bnxt_qplib_disable_nq(struct bnxt_qplib_nq *nq);
  495  int bnxt_qplib_nq_start_irq(struct bnxt_qplib_nq *nq, int nq_indx,
  497  int bnxt_qplib_enable_nq(struct pci_dev *pdev, struct bnxt_qplib_nq *nq,
  538  void bnxt_qplib_free_nq(struct bnxt_qplib_nq *nq);
  539  int bnxt_qplib_alloc_nq(struct bnxt_qplib_res *res, struct bnxt_qplib_nq *nq);
  [all...]
main.c:
  262   struct bnxt_qplib_nq *nq;   in bnxt_re_stop_irq() local
  266   nq = &rdev->nq[indx - 1];   in bnxt_re_stop_irq()
  267   bnxt_qplib_nq_stop_irq(nq, false);   in bnxt_re_stop_irq()
  278   struct bnxt_qplib_nq *nq;   in bnxt_re_start_irq() local
  304   nq = &rdev->nq[indx - 1];   in bnxt_re_start_irq()
  305   rc = bnxt_qplib_nq_start_irq(nq, indx - 1,   in bnxt_re_start_irq()
  892   static int bnxt_re_srqn_handler(struct bnxt_qplib_nq *nq,   in bnxt_re_srqn_handler() argument
  922   static int bnxt_re_cqn_handler(struct bnxt_qplib_nq *nq,   in bnxt_re_cqn_handler() argument
  1044  struct bnxt_qplib_nq *nq;   bnxt_re_alloc_res() local
  [all...]
ib_verbs.c:
  1573  struct bnxt_qplib_nq *nq = NULL;   in bnxt_re_destroy_srq() local
  1576  nq = qplib_srq->cq->nq;   in bnxt_re_destroy_srq()
  1580  if (nq)   in bnxt_re_destroy_srq()
  1581  nq->budget--;   in bnxt_re_destroy_srq()
  1622  struct bnxt_qplib_nq *nq = NULL;   in bnxt_re_create_srq() local
  1662  srq->qplib_srq.eventq_hw_ring_id = rdev->nq[0].ring_id;   in bnxt_re_create_srq()
  1663  nq = &rdev->nq[0];   in bnxt_re_create_srq()
  1689  if (nq)   in bnxt_re_create_srq()
  2803  struct bnxt_qplib_nq *nq;   bnxt_re_destroy_cq() local
  2827  struct bnxt_qplib_nq *nq = NULL;   bnxt_re_create_cq() local
  [all...]
bnxt_re.h:
  163  struct bnxt_qplib_nq nq[BNXT_RE_MAX_MSIX];   member
/kernel/linux/linux-5.10/lib/crypto/ |
curve25519-hacl64.c:
  546  ladder_smallloop_cmult_small_loop_step(u64 *nq, u64 *nqpq, u64 *nq2, u64 *nqpq2,   in ladder_smallloop_cmult_small_loop_step() argument
  551  point_swap_conditional(nq, nqpq, bit0);   in ladder_smallloop_cmult_small_loop_step()
  552  addanddouble_fmonty(nq2, nqpq2, nq, nqpq, q);   in ladder_smallloop_cmult_small_loop_step()
  558  ladder_smallloop_cmult_small_loop_double_step(u64 *nq, u64 *nqpq, u64 *nq2,   in ladder_smallloop_cmult_small_loop_double_step() argument
  562  ladder_smallloop_cmult_small_loop_step(nq, nqpq, nq2, nqpq2, q, byt);   in ladder_smallloop_cmult_small_loop_double_step()
  564  ladder_smallloop_cmult_small_loop_step(nq2, nqpq2, nq, nqpq, q, byt1);   in ladder_smallloop_cmult_small_loop_double_step()
  568  ladder_smallloop_cmult_small_loop(u64 *nq, u64 *nqpq, u64 *nq2, u64 *nqpq2,   in ladder_smallloop_cmult_small_loop() argument
  572  ladder_smallloop_cmult_small_loop_double_step(nq, nqpq, nq2,   in ladder_smallloop_cmult_small_loop()
  578  static __always_inline void ladder_bigloop_cmult_big_loop(u8 *n1, u64 *nq,   in ladder_bigloop_cmult_big_loop() argument
  585  ladder_smallloop_cmult_small_loop(nq, nqp   in ladder_bigloop_cmult_big_loop()
  593  u64 *nq = point_buf;   ladder_cmult() local
  778  u64 *nq = buf;   curve25519_generic() local
  [all...]
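
The hacl64 hits are the Montgomery-ladder step: point_swap_conditional() swaps the two running points depending on the current scalar bit, then addanddouble_fmonty() performs the combined add-and-double. The essential trick is the constant-time conditional swap; a self-contained sketch of that piece (limb count and names are illustrative, not the hacl64 internals):

    #include <stdint.h>

    /* Constant-time conditional swap of two 5-limb field elements, the same
     * idea as point_swap_conditional(): when bit == 1 the values are
     * exchanged, when bit == 0 nothing changes, and the memory access
     * pattern is identical in both cases. */
    static void fe_cswap(uint64_t a[5], uint64_t b[5], uint64_t bit)
    {
        uint64_t mask = 0 - bit;              /* all-zeros or all-ones */

        for (int i = 0; i < 5; i++) {
            uint64_t t = mask & (a[i] ^ b[i]);
            a[i] ^= t;
            b[i] ^= t;
        }
    }

Because the mask is derived arithmetically from the key bit, both cases execute identical loads and stores, which is what keeps the ladder free of key-dependent branches.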
/kernel/linux/linux-6.6/lib/crypto/ |
curve25519-hacl64.c:
  544  ladder_smallloop_cmult_small_loop_step(u64 *nq, u64 *nqpq, u64 *nq2, u64 *nqpq2,   in ladder_smallloop_cmult_small_loop_step() argument
  549  point_swap_conditional(nq, nqpq, bit0);   in ladder_smallloop_cmult_small_loop_step()
  550  addanddouble_fmonty(nq2, nqpq2, nq, nqpq, q);   in ladder_smallloop_cmult_small_loop_step()
  556  ladder_smallloop_cmult_small_loop_double_step(u64 *nq, u64 *nqpq, u64 *nq2,   in ladder_smallloop_cmult_small_loop_double_step() argument
  560  ladder_smallloop_cmult_small_loop_step(nq, nqpq, nq2, nqpq2, q, byt);   in ladder_smallloop_cmult_small_loop_double_step()
  562  ladder_smallloop_cmult_small_loop_step(nq2, nqpq2, nq, nqpq, q, byt1);   in ladder_smallloop_cmult_small_loop_double_step()
  566  ladder_smallloop_cmult_small_loop(u64 *nq, u64 *nqpq, u64 *nq2, u64 *nqpq2,   in ladder_smallloop_cmult_small_loop() argument
  570  ladder_smallloop_cmult_small_loop_double_step(nq, nqpq, nq2,   in ladder_smallloop_cmult_small_loop()
  576  static __always_inline void ladder_bigloop_cmult_big_loop(u8 *n1, u64 *nq,   in ladder_bigloop_cmult_big_loop() argument
  583  ladder_smallloop_cmult_small_loop(nq, nqp   in ladder_bigloop_cmult_big_loop()
  591  u64 *nq = point_buf;   ladder_cmult() local
  776  u64 *nq = buf;   curve25519_generic() local
  [all...]
/kernel/linux/linux-5.10/fs/xfs/ |
xfs_trans_dquot.c:
  78   struct xfs_dqtrx *oq, *nq;   in xfs_trans_dup_dqinfo() local
  104  nq = &nqa[i];   in xfs_trans_dup_dqinfo()
  109  nq->qt_dquot = oq->qt_dquot;   in xfs_trans_dup_dqinfo()
  110  nq->qt_bcount_delta = nq->qt_icount_delta = 0;   in xfs_trans_dup_dqinfo()
  111  nq->qt_rtbcount_delta = 0;   in xfs_trans_dup_dqinfo()
  116  nq->qt_blk_res = oq->qt_blk_res - blk_res_used;   in xfs_trans_dup_dqinfo()
  119  nq->qt_rtblk_res = oq->qt_rtblk_res -   in xfs_trans_dup_dqinfo()
  123  nq->qt_ino_res = oq->qt_ino_res - oq->qt_ino_res_used;   in xfs_trans_dup_dqinfo()
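
The xfs_trans_dup_dqinfo() hits show how per-dquot transaction state is carried across a transaction roll: the new xfs_dqtrx keeps the same dquot, starts its usage deltas at zero, and inherits only the reservation the old transaction has not yet consumed. A simplified sketch of that hand-off for the block reservation (field names abbreviated, not the real struct):

    #include <stdint.h>

    /* Simplified stand-in for struct xfs_dqtrx: one dquot's per-transaction
     * reservation and usage deltas (fields are illustrative). */
    struct dqtrx {
        int64_t blk_res;        /* blocks reserved for this transaction */
        int64_t bcount_delta;   /* blocks actually used so far */
        int64_t icount_delta;   /* inodes actually used so far */
    };

    /* What the duplication does per dquot when a transaction is rolled:
     * the new transaction inherits only the still-unused reservation, and
     * its usage deltas start from zero. */
    static void dup_dqtrx(struct dqtrx *nq, const struct dqtrx *oq)
    {
        int64_t blk_res_used = oq->bcount_delta > 0 ? oq->bcount_delta : 0;

        nq->bcount_delta = 0;
        nq->icount_delta = 0;
        nq->blk_res = oq->blk_res - blk_res_used;
    }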
/kernel/linux/linux-6.6/fs/xfs/ |
xfs_trans_dquot.c:
  78   struct xfs_dqtrx *oq, *nq;   in xfs_trans_dup_dqinfo() local
  97   nq = &nqa[i];   in xfs_trans_dup_dqinfo()
  102  nq->qt_dquot = oq->qt_dquot;   in xfs_trans_dup_dqinfo()
  103  nq->qt_bcount_delta = nq->qt_icount_delta = 0;   in xfs_trans_dup_dqinfo()
  104  nq->qt_rtbcount_delta = 0;   in xfs_trans_dup_dqinfo()
  109  nq->qt_blk_res = oq->qt_blk_res - blk_res_used;   in xfs_trans_dup_dqinfo()
  112  nq->qt_rtblk_res = oq->qt_rtblk_res -   in xfs_trans_dup_dqinfo()
  116  nq->qt_ino_res = oq->qt_ino_res - oq->qt_ino_res_used;   in xfs_trans_dup_dqinfo()
/kernel/linux/linux-6.6/drivers/net/ethernet/engleder/ |
tsnep_main.c:
  336   struct netdev_queue *nq;   in tsnep_tx_enable() local
  338   nq = netdev_get_tx_queue(tx->adapter->netdev, tx->queue_index);   in tsnep_tx_enable()
  340   __netif_tx_lock_bh(nq);   in tsnep_tx_enable()
  341   netif_tx_wake_queue(nq);   in tsnep_tx_enable()
  342   __netif_tx_unlock_bh(nq);   in tsnep_tx_enable()
  347   struct netdev_queue *nq;   in tsnep_tx_disable() local
  350   nq = netdev_get_tx_queue(tx->adapter->netdev, tx->queue_index);   in tsnep_tx_disable()
  352   __netif_tx_lock_bh(nq);   in tsnep_tx_disable()
  353   netif_tx_stop_queue(nq);   in tsnep_tx_disable()
  354   __netif_tx_unlock_bh(nq);   in tsnep_tx_disable()
  761   struct netdev_queue *nq;   tsnep_tx_poll() local
  854   struct netdev_queue *nq;   tsnep_tx_pending() local
  2255  struct netdev_queue *nq;   tsnep_netdev_xdp_xmit() local
  [all...]
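
tsnep_tx_enable()/tsnep_tx_disable() use the stock netdev TX queue stop/wake pattern: look up the per-queue struct netdev_queue and toggle its state under the BH-disabling xmit lock so the change cannot race ndo_start_xmit. A condensed kernel-context sketch of the pattern (generic driver names, not the tsnep code verbatim):

    #include <linux/netdevice.h>

    /* Stop a TX queue so the stack submits no new skbs to it; the
     * BH-disabling xmit lock serializes against concurrent transmit. */
    static void example_tx_disable(struct net_device *netdev, int queue_index)
    {
        struct netdev_queue *nq = netdev_get_tx_queue(netdev, queue_index);

        __netif_tx_lock_bh(nq);
        netif_tx_stop_queue(nq);
        __netif_tx_unlock_bh(nq);
    }

    /* Re-enable the queue once the hardware ring is usable again. */
    static void example_tx_enable(struct net_device *netdev, int queue_index)
    {
        struct netdev_queue *nq = netdev_get_tx_queue(netdev, queue_index);

        __netif_tx_lock_bh(nq);
        netif_tx_wake_queue(nq);
        __netif_tx_unlock_bh(nq);
    }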
/kernel/linux/linux-5.10/drivers/net/ethernet/chelsio/cxgb4/ |
cxgb4_uld.c:
  111  unsigned int nq = rxq_info->nrxq + rxq_info->nciq;   in alloc_uld_rxqs() local
  125  for (i = 0; i < nq; i++, q++) {   in alloc_uld_rxqs()
  403  int nq = txq_info->ntxq;   in free_sge_txq_uld() local
  406  for (i = 0; i < nq; i++) {   in free_sge_txq_uld()
  426  int nq = txq_info->ntxq;   in alloc_sge_txq_uld() local
  429  j = nq / adap->params.nports;   in alloc_sge_txq_uld()
  430  for (i = 0; i < nq; i++) {   in alloc_sge_txq_uld()
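
In alloc_sge_txq_uld() the hits show the ULD TX queue count being split evenly across ports via j = nq / adap->params.nports. The per-queue port assignment itself is not visible in these hits; a toy illustration of one plausible even split along those lines (numbers are made up):

    #include <stdio.h>

    /* Spread nq TX queues evenly across nports ports: with a stride of
     * nq / nports, queue i lands on port i / stride. */
    int main(void)
    {
        int nq = 8, nports = 2;
        int stride = nq / nports;

        for (int i = 0; i < nq; i++)
            printf("txq %d -> port %d\n", i, i / stride);
        return 0;
    }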
/kernel/linux/linux-6.6/drivers/net/ethernet/chelsio/cxgb4/ |
cxgb4_uld.c:
  111  unsigned int nq = rxq_info->nrxq + rxq_info->nciq;   in alloc_uld_rxqs() local
  125  for (i = 0; i < nq; i++, q++) {   in alloc_uld_rxqs()
  403  int nq = txq_info->ntxq;   in free_sge_txq_uld() local
  406  for (i = 0; i < nq; i++) {   in free_sge_txq_uld()
  426  int nq = txq_info->ntxq;   in alloc_sge_txq_uld() local
  429  j = nq / adap->params.nports;   in alloc_sge_txq_uld()
  430  for (i = 0; i < nq; i++) {   in alloc_sge_txq_uld()
/kernel/linux/linux-6.6/drivers/net/vmxnet3/ |
vmxnet3_xdp.c:
  205  struct netdev_queue *nq;   in vmxnet3_xdp_xmit_back() local
  212  nq = netdev_get_tx_queue(adapter->netdev, tq->qid);   in vmxnet3_xdp_xmit_back()
  214  __netif_tx_lock(nq, smp_processor_id());   in vmxnet3_xdp_xmit_back()
  216  __netif_tx_unlock(nq);   in vmxnet3_xdp_xmit_back()
/kernel/linux/linux-5.10/arch/x86/crypto/ |
curve25519-x86_64.c:
  766  u64 *nq = p01_tmp1;   in point_add_and_double() local
  770  u64 *x2 = nq;   in point_add_and_double()
  771  u64 *z2 = nq + (u32)4U;   in point_add_and_double()
  813  fmul2(nq, dc1, ab1, tmp2);   in point_add_and_double()
  817  static void point_double(u64 *nq, u64 *tmp1, u64 *tmp2)   in point_double() argument
  819  u64 *x2 = nq;   in point_double()
  820  u64 *z2 = nq + (u32)4U;   in point_double()
  837  fmul2(nq, dc, ab, tmp2);   in point_double()
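
Here nq is a flat u64 buffer holding a projective Montgomery point: limbs 0-3 are X (x2 = nq) and limbs 4-7 are Z (z2 = nq + 4), and point_double()/point_add_and_double() operate on those slices in place. A tiny sketch of just that buffer-slicing convention (the helper name is made up and no field arithmetic is shown):

    #include <stdint.h>

    typedef uint64_t u64;

    /* A projective Montgomery point packed into one 8-limb buffer:
     * limbs [0..3] hold X, limbs [4..7] hold Z, as in point_double(). */
    static void point_set_identity(u64 *nq)
    {
        u64 *x2 = nq;         /* X coordinate, 4 limbs */
        u64 *z2 = nq + 4;     /* Z coordinate, 4 limbs */

        x2[0] = 1; x2[1] = x2[2] = x2[3] = 0;   /* X = 1 */
        z2[0] = z2[1] = z2[2] = z2[3] = 0;      /* Z = 0: point at infinity */
    }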
/kernel/linux/linux-6.6/arch/x86/crypto/ |
curve25519-x86_64.c:
  977   u64 *nq = p01_tmp1;   in point_add_and_double() local
  981   u64 *x2 = nq;   in point_add_and_double()
  982   u64 *z2 = nq + (u32)4U;   in point_add_and_double()
  1024  fmul2(nq, dc1, ab1, tmp2);   in point_add_and_double()
  1028  static void point_double(u64 *nq, u64 *tmp1, u64 *tmp2)   in point_double() argument
  1030  u64 *x2 = nq;   in point_double()
  1031  u64 *z2 = nq + (u32)4U;   in point_double()
  1048  fmul2(nq, dc, ab, tmp2);   in point_double()
/kernel/linux/linux-5.10/drivers/net/ethernet/marvell/ |
mvneta.c:
  1834  struct netdev_queue *nq, bool napi)   in mvneta_txq_bufs_free()
  1864  netdev_tx_completed_queue(nq, pkts_compl, bytes_compl);   in mvneta_txq_bufs_free()
  1871  struct netdev_queue *nq = netdev_get_tx_queue(pp->dev, txq->id);   in mvneta_txq_done() local
  1878  mvneta_txq_bufs_free(pp, txq, tx_done, nq, true);   in mvneta_txq_done()
  1882  if (netif_tx_queue_stopped(nq)) {   in mvneta_txq_done()
  1884  netif_tx_wake_queue(nq);   in mvneta_txq_done()
  2090  struct netdev_queue *nq;   in mvneta_xdp_xmit_back() local
  2101  nq = netdev_get_tx_queue(pp->dev, txq->id);   in mvneta_xdp_xmit_back()
  2103  __netif_tx_lock(nq, cpu);   in mvneta_xdp_xmit_back()
  2118  __netif_tx_unlock(nq);   in mvneta_xdp_xmit_back()
  1832  mvneta_txq_bufs_free(struct mvneta_port *pp, struct mvneta_tx_queue *txq, int num, struct netdev_queue *nq, bool napi)   mvneta_txq_bufs_free() argument
  2132  struct netdev_queue *nq;   mvneta_xdp_xmit() local
  2830  struct netdev_queue *nq = netdev_get_tx_queue(dev, txq_id);   mvneta_tx() local
  2863  struct netdev_queue *nq = netdev_get_tx_queue(pp->dev, txq->id);   mvneta_txq_done_force() local
  2880  struct netdev_queue *nq;   mvneta_tx_done_gbe() local
  3457  struct netdev_queue *nq = netdev_get_tx_queue(pp->dev, txq->id);   mvneta_txq_sw_deinit() local
  [all...]
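
The mvneta hits cluster around TX completion: mvneta_txq_bufs_free() reports finished packets and bytes to byte queue limits with netdev_tx_completed_queue(), and mvneta_txq_done() wakes the queue if it had been stopped and descriptors have become available again. A condensed kernel-context sketch of that completion-side bookkeeping (generic names; the descriptor-availability check is simplified):

    #include <linux/netdevice.h>

    /* TX completion path in the style of mvneta_txq_done(): report completed
     * work to BQL, then wake the queue if it was stopped and there is room
     * for at least one more frame. */
    static void example_txq_done(struct net_device *dev, int queue_index,
                                 unsigned int pkts_compl,
                                 unsigned int bytes_compl,
                                 unsigned int descs_free, unsigned int needed)
    {
        struct netdev_queue *nq = netdev_get_tx_queue(dev, queue_index);

        netdev_tx_completed_queue(nq, pkts_compl, bytes_compl);

        if (netif_tx_queue_stopped(nq) && descs_free >= needed)
            netif_tx_wake_queue(nq);
    }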
/kernel/linux/linux-6.6/drivers/net/ethernet/marvell/ |
mvneta.c:
  1870  struct netdev_queue *nq, bool napi)   in mvneta_txq_bufs_free()
  1909  netdev_tx_completed_queue(nq, pkts_compl, bytes_compl);   in mvneta_txq_bufs_free()
  1916  struct netdev_queue *nq = netdev_get_tx_queue(pp->dev, txq->id);   in mvneta_txq_done() local
  1923  mvneta_txq_bufs_free(pp, txq, tx_done, nq, true);   in mvneta_txq_done()
  1927  if (netif_tx_queue_stopped(nq)) {   in mvneta_txq_done()
  1929  netif_tx_wake_queue(nq);   in mvneta_txq_done()
  2178  struct netdev_queue *nq;   in mvneta_xdp_xmit_back() local
  2189  nq = netdev_get_tx_queue(pp->dev, txq->id);   in mvneta_xdp_xmit_back()
  2191  __netif_tx_lock(nq, cpu);   in mvneta_xdp_xmit_back()
  2206  __netif_tx_unlock(nq);   in mvneta_xdp_xmit_back()
  1868  mvneta_txq_bufs_free(struct mvneta_port *pp, struct mvneta_tx_queue *txq, int num, struct netdev_queue *nq, bool napi)   mvneta_txq_bufs_free() argument
  2220  struct netdev_queue *nq;   mvneta_xdp_xmit() local
  2976  struct netdev_queue *nq = netdev_get_tx_queue(dev, txq_id);   mvneta_tx() local
  3009  struct netdev_queue *nq = netdev_get_tx_queue(pp->dev, txq->id);   mvneta_txq_done_force() local
  3026  struct netdev_queue *nq;   mvneta_tx_done_gbe() local
  3602  struct netdev_queue *nq = netdev_get_tx_queue(pp->dev, txq->id);   mvneta_txq_sw_deinit() local
  [all...]