/kernel/linux/linux-6.6/drivers/infiniband/hw/irdma/
hw.c
     78  struct irdma_sc_dev *dev = &rf->sc_dev;  in irdma_puda_ce_handler()
    107  struct irdma_sc_dev *dev = &rf->sc_dev;  in irdma_process_ceq()
    215  struct irdma_sc_dev *dev = &rf->sc_dev;  in irdma_process_aeq()
    453  irdma_ena_intr(&rf->sc_dev, rf->iw_msixtbl[0].idx);  in irdma_dpc()
    466  irdma_ena_intr(&rf->sc_dev, iwceq->msix_idx);  in irdma_ceq_dpc()
    551  ibdev_err(to_ibdev(&iwceq->rf->sc_dev), "expected irq = %d received irq = %d\n",  in irdma_ceq_handler()
    569  struct irdma_sc_dev *dev = &rf->sc_dev;  in irdma_destroy_irq()
    592  struct irdma_sc_dev *dev = &rf->sc_dev;  in irdma_destroy_cqp()
    631  struct irdma_sc_dev *dev = &rf->sc_dev;  in irdma_destroy_aeq()
    636  rf->sc_dev  in irdma_destroy_aeq()
    [all ...]

verbs.c
     18  struct irdma_hw_attrs *hw_attrs = &rf->sc_dev.hw_attrs;  in irdma_query_device()
     26  props->fw_ver = (u64)irdma_fw_major_ver(&rf->sc_dev) << 32 |  in irdma_query_device()
     27  irdma_fw_minor_ver(&rf->sc_dev);  in irdma_query_device()
    106  props->max_msg_sz = iwdev->rf->sc_dev.hw_attrs.max_hw_outbound_msg_size;  in irdma_query_port()
    128  pfn = ((uintptr_t)ucontext->iwdev->rf->sc_dev.hw_regs[IRDMA_DB_ADDR_OFFSET] +  in irdma_mmap_legacy()
    254  iwdev->rf->sc_dev.hw_attrs.max_hw_device_pages) {  in irdma_alloc_push_page()
    280  struct irdma_uk_attrs *uk_attrs = &iwdev->rf->sc_dev.hw_attrs.uk_attrs;  in irdma_alloc_ucontext()
    305  uresp.max_pds = iwdev->rf->sc_dev.hw_attrs.max_hw_pds;  in irdma_alloc_ucontext()
    306  uresp.wq_size = iwdev->rf->sc_dev.hw_attrs.max_qp_wr * 2;  in irdma_alloc_ucontext()
    312  u64 bar_off = (uintptr_t)iwdev->rf->sc_dev  in irdma_alloc_ucontext()
    [all ...]

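The irdma_query_device() hits at verbs.c lines 26-27 show how the reported firmware version is encoded: the major version sits in the upper 32 bits of the 64-bit fw_ver attribute and the minor version in the lower 32 bits. Below is a minimal sketch of that packing; the helper is illustrative and not the driver's irdma_fw_major_ver()/irdma_fw_minor_ver().

```c
/*
 * Sketch of the fw_ver encoding seen at verbs.c:26-27.
 * The helper name is illustrative only.
 */
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

static uint64_t pack_fw_ver(uint32_t major, uint32_t minor)
{
	/* major in the upper 32 bits, minor in the lower 32 bits */
	return (uint64_t)major << 32 | minor;
}

int main(void)
{
	uint64_t fw_ver = pack_fw_ver(1, 5);

	/* Consumers can split it back apart the same way. */
	printf("fw_ver=0x%016" PRIx64 " major=%u minor=%u\n",
	       fw_ver, (uint32_t)(fw_ver >> 32), (uint32_t)fw_ver);
	return 0;
}
```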
main.h
    299  struct irdma_sc_dev sc_dev;  member
    410  return container_of(dev, struct irdma_pci_f, sc_dev);  in dev_to_rf()

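The main.h hits show the layout behind most of the matches above: struct irdma_sc_dev is embedded by value in struct irdma_pci_f (line 299), and dev_to_rf() recovers the containing structure with container_of() (line 410). The sketch below is a self-contained userspace version of that embed-and-recover pattern; the field contents are simplified stand-ins, not the real structure definitions.

```c
/*
 * Embed-and-recover sketch modeled on main.h:299 and dev_to_rf() at main.h:410.
 * Field layouts are placeholders, not the driver's real structures.
 */
#include <stddef.h>
#include <stdio.h>

/* In the kernel this macro comes from <linux/container_of.h>. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct irdma_sc_dev {
	int hw_rev;			/* placeholder field */
};

struct irdma_pci_f {
	int msix_count;			/* placeholder field */
	struct irdma_sc_dev sc_dev;	/* embedded by value, as at main.h:299 */
};

/* Same idea as dev_to_rf(): walk back from the member to its container. */
static struct irdma_pci_f *dev_to_rf(struct irdma_sc_dev *dev)
{
	return container_of(dev, struct irdma_pci_f, sc_dev);
}

int main(void)
{
	struct irdma_pci_f rf = { .msix_count = 4 };
	struct irdma_sc_dev *dev = &rf.sc_dev;	/* what the call sites pass around */

	/* Recover the outer structure from the embedded member. */
	printf("msix_count = %d\n", dev_to_rf(dev)->msix_count);
	return 0;
}
```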
main.c
     98  irdma_log_invalid_mtu(l2params.mtu, &iwdev->rf->sc_dev);  in irdma_iidc_event_handler()
    125  pe_criterr = readl(iwdev->rf->sc_dev.hw_regs[IRDMA_GLPE_CRITERR]);  in irdma_iidc_event_handler()

utils.c
    533  struct irdma_sc_dev *dev = &rf->sc_dev;  in irdma_cleanup_pending_cqp_op()
    570  cqp_timeout.compl_cqp_cmds = atomic64_read(&rf->sc_dev.cqp->completed_ops);  in irdma_wait_event()
    578  irdma_check_cqp_progress(&cqp_timeout, &rf->sc_dev);  in irdma_wait_event()
    699  struct irdma_sc_dev *dev = &rf->sc_dev;  in irdma_handle_cqp_op()
    790  return &(container_of(dev, struct irdma_pci_f, sc_dev))->iwdev->ibdev;  in to_ibdev()
   1159  dma_free_coherent(rf->sc_dev.hw->device, iwqp->q2_ctx_mem.size,  in irdma_free_qp_rsrc()
   1162  dma_free_coherent(rf->sc_dev.hw->device, iwqp->kqp.dma_mem.size,  in irdma_free_qp_rsrc()
   1903  if (!rf->sc_dev.ceq_valid)  in irdma_cqp_ws_node_cmd()

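The irdma_wait_event() hits at utils.c lines 570 and 578 hint at a stall-detection idiom: snapshot the CQP's completed-ops counter before waiting, then on a timeout compare it against a fresh read to tell a slow-but-progressing CQP apart from a stuck one. The sketch below only illustrates that idea; the structure, names, and retry policy are assumptions, not the driver's irdma_check_cqp_progress() code.

```c
/*
 * Snapshot-and-compare progress check, loosely modeled on the
 * irdma_wait_event() hits.  All names here are illustrative.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct cqp_timeout_sketch {
	uint64_t compl_cqp_cmds;	/* snapshot of completed ops */
	unsigned int count;		/* consecutive timeouts with no progress */
};

/* Returns true if at least one new op completed since the last snapshot. */
static bool cqp_made_progress(struct cqp_timeout_sketch *t, uint64_t completed_now)
{
	if (completed_now != t->compl_cqp_cmds) {
		t->compl_cqp_cmds = completed_now;	/* re-arm the snapshot */
		t->count = 0;
		return true;
	}
	t->count++;					/* no movement this round */
	return false;
}

int main(void)
{
	struct cqp_timeout_sketch t = { .compl_cqp_cmds = 100 };

	/* First timeout: two more commands completed, so keep waiting. */
	printf("progress: %d\n", cqp_made_progress(&t, 102));
	/* Second timeout: counter unchanged, candidate for error handling. */
	printf("progress: %d\n", cqp_made_progress(&t, 102));
	return 0;
}
```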
cm.c
   2204  if (irdma_puda_create_ah(&iwdev->rf->sc_dev, &ah_info, wait,  in irdma_cm_create_ah()
   2222  irdma_puda_free_ah(&iwdev->rf->sc_dev, cm_node->ah);  in irdma_cm_free_ah()
   2284  cm_node->dev = &iwdev->rf->sc_dev;  in irdma_make_cm_node()
   3153  if (iwdev->rf->sc_dev.hw_attrs.uk_attrs.hw_rev >= IRDMA_GEN_2) {  in irdma_receive_ilq()
   3265  cm_core->dev = &iwdev->rf->sc_dev;  in irdma_setup_cm_core()
   3623  dma_free_coherent(iwdev->rf->sc_dev.hw->device,  in irdma_free_lsmm_rsrc()
   3658  dev = &iwdev->rf->sc_dev;  in irdma_accept()
   4148  dev = &iwdev->rf->sc_dev;  in irdma_cm_event_connected()

/kernel/linux/linux-5.10/drivers/infiniband/hw/i40iw/
i40iw_main.c
    198  i40iw_enable_intr(&iwdev->sc_dev, iwdev->iw_msixtbl[0].idx);  in i40iw_dpc()
    211  i40iw_enable_intr(&iwdev->sc_dev, iwceq->msix_idx);  in i40iw_ceq_dpc()
    237  struct i40iw_sc_dev *dev = &iwdev->sc_dev;  in i40iw_destroy_cqp()
    284  struct i40iw_sc_dev *dev = &iwdev->sc_dev;  in i40iw_destroy_aeq()
    313  struct i40iw_sc_dev *dev = &iwdev->sc_dev;  in i40iw_destroy_ceq()
    341  struct i40iw_sc_dev *dev = &iwdev->sc_dev;  in i40iw_dele_ceqs()
    357  iwdev->sc_dev.ceq_valid = false;  in i40iw_dele_ceqs()
    369  struct i40iw_sc_dev *dev = &iwdev->sc_dev;  in i40iw_destroy_ccq()
    474  struct i40iw_sc_dev *dev = &iwdev->sc_dev;  in i40iw_create_hmc_objs()
    558  struct i40iw_sc_dev *dev = &iwdev->sc_dev;  in i40iw_create_cqp()
    [all ...]

i40iw_hw.c
     60  max_qp = iwdev->sc_dev.hmc_info->hmc_obj[I40IW_HMC_IW_QP].cnt;  in i40iw_initialize_hw_resources()
     61  max_cq = iwdev->sc_dev.hmc_info->hmc_obj[I40IW_HMC_IW_CQ].cnt;  in i40iw_initialize_hw_resources()
     62  max_mr = iwdev->sc_dev.hmc_info->hmc_obj[I40IW_HMC_IW_MR].cnt;  in i40iw_initialize_hw_resources()
     63  arp_table_size = iwdev->sc_dev.hmc_info->hmc_obj[I40IW_HMC_IW_ARP].cnt;  in i40iw_initialize_hw_resources()
    125  struct i40iw_sc_dev *dev = &iwdev->sc_dev;  in i40iw_cqp_ce_handler()
    188  struct i40iw_sc_dev *dev = (struct i40iw_sc_dev *)&iwdev->sc_dev;  in i40iw_puda_ce_handler()
    216  struct i40iw_sc_dev *dev = &iwdev->sc_dev;  in i40iw_process_ceq()
    278  struct i40iw_sc_dev *dev = &iwdev->sc_dev;  in i40iw_process_aeq()
    594  struct i40iw_sc_dev *dev = &iwdev->sc_dev;  in i40iw_manage_qhash()

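The i40iw_initialize_hw_resources() hits (lines 60-63) show where the driver takes its resource limits from: sc_dev.hmc_info carries a count table indexed by HMC object type, and the QP/CQ/MR/ARP table sizes are read straight out of it. The sketch below is a simplified version of that count-table lookup; the types and counts are stand-ins for the real HMC definitions.

```c
/*
 * Per-object-type count table, loosely modeled on the
 * i40iw_initialize_hw_resources() hits.  Types and values are placeholders.
 */
#include <stdio.h>

enum hmc_obj_type { HMC_IW_QP, HMC_IW_CQ, HMC_IW_MR, HMC_IW_ARP, HMC_IW_MAX };

struct hmc_obj_info {
	unsigned int cnt;		/* objects available for this type */
};

struct hmc_info_sketch {
	struct hmc_obj_info hmc_obj[HMC_IW_MAX];
};

int main(void)
{
	/* Counts normally come from the HMC resource profile; fixed here. */
	struct hmc_info_sketch hmc = {
		.hmc_obj = {
			[HMC_IW_QP]  = { .cnt = 1024 },
			[HMC_IW_CQ]  = { .cnt = 2048 },
			[HMC_IW_MR]  = { .cnt = 4096 },
			[HMC_IW_ARP] = { .cnt = 128 },
		},
	};

	/* Resource tables are sized from the per-type counts. */
	unsigned int max_qp = hmc.hmc_obj[HMC_IW_QP].cnt;
	unsigned int max_cq = hmc.hmc_obj[HMC_IW_CQ].cnt;
	unsigned int max_mr = hmc.hmc_obj[HMC_IW_MR].cnt;
	unsigned int arp_table_size = hmc.hmc_obj[HMC_IW_ARP].cnt;

	printf("qp=%u cq=%u mr=%u arp=%u\n", max_qp, max_cq, max_mr, arp_table_size);
	return 0;
}
```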
i40iw_verbs.c
     67  props->fw_ver = i40iw_fw_major_ver(&iwdev->sc_dev) << 32 |  in i40iw_query_device()
     68  i40iw_fw_minor_ver(&iwdev->sc_dev);  in i40iw_query_device()
     72  props->hw_ver = (u32)iwdev->sc_dev.hw_rev;  in i40iw_query_device()
    263  struct i40iw_sc_dev *dev = &iwdev->sc_dev;  in i40iw_alloc_pd()
    356  i40iw_free_dma_mem(iwdev->sc_dev.hw, &iwqp->q2_ctx_mem);  in i40iw_free_qp_resources()
    357  i40iw_free_dma_mem(iwdev->sc_dev.hw, &iwqp->kqp.dma_mem);  in i40iw_free_qp_resources()
    404  i40iw_cqp_qp_destroy_cmd(&iwdev->sc_dev, &iwqp->sc_qp);  in i40iw_destroy_qp()
    476  status = i40iw_allocate_dma_mem(iwdev->sc_dev.hw, mem, size, 256);  in i40iw_setup_kmode_qp()
    522  struct i40iw_sc_dev *dev = &iwdev->sc_dev;  in i40iw_create_qp()
    945  struct i40iw_sc_dev *dev = &iwdev->sc_dev;  in i40iw_modify_qp()
    [all ...]

i40iw_utils.c
    457  struct i40iw_sc_dev *dev = &iwdev->sc_dev;  in i40iw_cleanup_pending_cqp_op()
    494  cqp_timeout.compl_cqp_cmds = iwdev->sc_dev.cqp_cmd_stats[OP_COMPLETED_COMMANDS];  in i40iw_wait_event()
    500  i40iw_check_cqp_progress(&cqp_timeout, &iwdev->sc_dev);  in i40iw_wait_event()
    535  struct i40iw_sc_dev *dev = &iwdev->sc_dev;  in i40iw_handle_cqp_op()
   1005  i40iw_debug(&iwdev->sc_dev, I40IW_DEBUG_HMC, "%s1\n", __func__);  in i40iw_cqp_manage_hmc_fcn_callback()
   1011  i40iw_debug(&iwdev->sc_dev, I40IW_DEBUG_HMC, "%s2\n", __func__);  in i40iw_cqp_manage_hmc_fcn_callback()
   1013  i40iw_debug(&iwdev->sc_dev, I40IW_DEBUG_HMC, "%s: Something wrong\n", __func__);  in i40iw_cqp_manage_hmc_fcn_callback()
   1030  i40iw_debug(&iwdev->sc_dev, I40IW_DEBUG_HMC, "%s\n", __func__);  in i40iw_cqp_manage_hmc_fcn_cmd()

i40iw_cm.c
   1587  i40iw_debug(&iwdev->sc_dev,  in i40iw_del_multiple_qhash()
   1594  i40iw_debug(&iwdev->sc_dev, I40IW_DEBUG_CM,  in i40iw_del_multiple_qhash()
   1612  i40iw_debug(&iwdev->sc_dev,  in i40iw_del_multiple_qhash()
   1702  i40iw_debug(&iwdev->sc_dev,  in i40iw_add_mqh_6()
   1710  i40iw_debug(&iwdev->sc_dev,  in i40iw_add_mqh_6()
   1781  i40iw_debug(&iwdev->sc_dev,  in i40iw_add_mqh_4()
   1789  i40iw_debug(&iwdev->sc_dev,  in i40iw_add_mqh_4()
   2088  i40iw_debug(&iwdev->sc_dev, I40IW_DEBUG_CM, "dst_neigh_lookup MAC=%pM\n", neigh->ha);  in i40iw_addr_resolve_neigh_ipv6()
   2181  i40iw_debug(&iwdev->sc_dev, I40IW_DEBUG_DCB,  in i40iw_make_cm_node()
   2186  i40iw_debug(&iwdev->sc_dev, I40IW_DEBUG_DC  in i40iw_make_cm_node()
    [all ...]

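Nearly every i40iw_cm.c hit goes through i40iw_debug(), which takes the sc_dev plus a category flag (I40IW_DEBUG_CM, I40IW_DEBUG_DCB, ...) ahead of a printf-style format. The sketch below is a userspace version of that per-device, mask-filtered debug idiom; the structure layout and helper are illustrative, not the driver's actual macro.

```c
/*
 * Mask-filtered debug helper, loosely modeled on the i40iw_debug() call
 * sites above.  Flags, layout, and helper name are illustrative only.
 */
#include <stdarg.h>
#include <stdint.h>
#include <stdio.h>

#define DBG_CM  0x1u
#define DBG_HMC 0x2u
#define DBG_DCB 0x4u

struct sc_dev_sketch {
	uint32_t debug_mask;		/* which categories are enabled */
};

static void dev_debug(struct sc_dev_sketch *dev, uint32_t category,
		      const char *fmt, ...)
{
	va_list args;

	if (!(dev->debug_mask & category))
		return;			/* category disabled: drop the message */

	va_start(args, fmt);
	vprintf(fmt, args);
	va_end(args);
}

int main(void)
{
	struct sc_dev_sketch dev = { .debug_mask = DBG_CM };

	dev_debug(&dev, DBG_CM, "cm: qhash add for port %d\n", 4321);	/* printed */
	dev_debug(&dev, DBG_HMC, "hmc: never printed\n");		/* filtered out */
	return 0;
}
```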
i40iw.h
    235  struct i40iw_sc_dev sc_dev;  member