Lines Matching refs:sc_dev
78 struct irdma_sc_dev *dev = &rf->sc_dev;
107 struct irdma_sc_dev *dev = &rf->sc_dev;
215 struct irdma_sc_dev *dev = &rf->sc_dev;
453 irdma_ena_intr(&rf->sc_dev, rf->iw_msixtbl[0].idx);
466 irdma_ena_intr(&rf->sc_dev, iwceq->msix_idx);
551 ibdev_err(to_ibdev(&iwceq->rf->sc_dev), "expected irq = %d received irq = %d\n",
569 struct irdma_sc_dev *dev = &rf->sc_dev;
592 struct irdma_sc_dev *dev = &rf->sc_dev;
631 struct irdma_sc_dev *dev = &rf->sc_dev;
636 rf->sc_dev.irq_ops->irdma_cfg_aeq(&rf->sc_dev, rf->iw_msixtbl->idx, false);
667 struct irdma_sc_dev *dev = &rf->sc_dev;
702 rf->sc_dev.irq_ops->irdma_cfg_ceq(&rf->sc_dev,
712 rf->sc_dev.ceq_valid = false;
735 rf->sc_dev.irq_ops->irdma_cfg_ceq(&rf->sc_dev, msix_vec->ceq_id,
738 irdma_cqp_ceq_cmd(&rf->sc_dev, &iwceq->sc_ceq,
740 dma_free_coherent(rf->sc_dev.hw->device, iwceq->mem.size,
756 struct irdma_sc_dev *dev = &rf->sc_dev;
842 struct irdma_sc_dev *dev = &rf->sc_dev;
931 struct irdma_sc_dev *dev = &rf->sc_dev;
1035 struct irdma_sc_dev *dev = &rf->sc_dev;
1145 rf->sc_dev.irq_ops->irdma_cfg_ceq(&rf->sc_dev, ceq_id, msix_vec->idx, true);
1174 rf->sc_dev.irq_ops->irdma_cfg_aeq(&rf->sc_dev, msix_vec->idx, true);
1194 struct irdma_sc_dev *dev = &rf->sc_dev;
1199 ceq_size = min(rf->sc_dev.hmc_info->hmc_obj[IRDMA_HMC_IW_CQ].cnt,
1218 status = irdma_cqp_ceq_cmd(&rf->sc_dev, &iwceq->sc_ceq,
1249 num_ceqs = min(rf->msix_count, rf->sc_dev.hmc_fpm_misc.max_ceqs);
1275 irdma_ena_intr(&rf->sc_dev, msix_vec->idx);
1284 rf->sc_dev.ceq_valid = true;
1307 num_ceqs = min(rf->msix_count, rf->sc_dev.hmc_fpm_misc.max_ceqs);
1326 irdma_ena_intr(&rf->sc_dev, msix_vec->idx);
1382 struct irdma_sc_dev *dev = &rf->sc_dev;
1384 struct irdma_hmc_info *hmc_info = rf->sc_dev.hmc_info;
1448 struct irdma_sc_dev *dev = &rf->sc_dev;
1557 status = irdma_cfg_fpm_val(&rf->sc_dev, qpcnt);
1572 struct irdma_sc_dev *dev = &rf->sc_dev;
1604 struct irdma_sc_dev *dev = &rf->sc_dev;
1641 status = irdma_sc_dev_init(rf->rdma_ver, &rf->sc_dev, &info);
1666 if (iwdev->rf->sc_dev.hw_attrs.uk_attrs.hw_rev == IRDMA_GEN_1)
1770 irdma_del_hmc_objects(&rf->sc_dev, rf->sc_dev.hmc_info, true,
1798 struct irdma_sc_dev *dev = &rf->sc_dev;
1847 status = irdma_hmc_init_pble(&rf->sc_dev,
1866 if (iwdev->rf->sc_dev.hw_attrs.uk_attrs.hw_rev == IRDMA_GEN_1)
1899 struct irdma_sc_dev *dev = &rf->sc_dev;
2022 rf->max_cqe = rf->sc_dev.hw_attrs.uk_attrs.max_hw_cq_size;
2023 rf->max_qp = rf->sc_dev.hmc_info->hmc_obj[IRDMA_HMC_IW_QP].cnt;
2024 rf->max_mr = rf->sc_dev.hmc_info->hmc_obj[IRDMA_HMC_IW_MR].cnt;
2025 rf->max_cq = rf->sc_dev.hmc_info->hmc_obj[IRDMA_HMC_IW_CQ].cnt;
2026 rf->max_pd = rf->sc_dev.hw_attrs.max_hw_pds;
2027 rf->arp_table_size = rf->sc_dev.hmc_info->hmc_obj[IRDMA_HMC_IW_ARP].cnt;
2028 rf->max_ah = rf->sc_dev.hmc_info->hmc_obj[IRDMA_HMC_IW_FSIAV].cnt;
2078 struct irdma_sc_dev *dev = &rf->sc_dev;
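
The recurring pattern in these matches is that each helper caches a local alias (struct irdma_sc_dev *dev = &rf->sc_dev;) and drives CEQ/AEQ interrupt setup through the irq_ops callback table (irdma_cfg_ceq / irdma_cfg_aeq) followed by irdma_ena_intr. The sketch below is a minimal, self-contained mock of that pattern only; the struct layouts, argument types, and function bodies are placeholders for illustration and are not the driver's real definitions.

	/* Illustrative sketch: mock types standing in for the irdma driver's
	 * real definitions, showing the &rf->sc_dev alias + irq_ops pattern. */
	#include <stdbool.h>
	#include <stdio.h>

	struct irdma_sc_dev;

	struct irdma_irq_ops {			/* mock callback table */
		void (*irdma_cfg_ceq)(struct irdma_sc_dev *dev, unsigned int ceq_id,
				      unsigned int idx, bool enable);
		void (*irdma_cfg_aeq)(struct irdma_sc_dev *dev, unsigned int idx,
				      bool enable);
	};

	struct irdma_sc_dev {			/* mock of the shared-code device */
		struct irdma_irq_ops *irq_ops;
		bool ceq_valid;
	};

	struct irdma_msix_vector {		/* mock MSI-X vector bookkeeping */
		unsigned int idx;
		unsigned int ceq_id;
	};

	struct irdma_pci_f {			/* mock of "rf", which embeds sc_dev */
		struct irdma_sc_dev sc_dev;
	};

	static void irdma_ena_intr(struct irdma_sc_dev *dev, unsigned int msix_id)
	{
		/* stand-in for the register write that enables the vector */
		(void)dev;
		printf("enable MSI-X vector %u\n", msix_id);
	}

	static void mock_cfg_ceq(struct irdma_sc_dev *dev, unsigned int ceq_id,
				 unsigned int idx, bool enable)
	{
		(void)dev;
		printf("map CEQ %u to vector %u: %s\n", ceq_id, idx,
		       enable ? "on" : "off");
	}

	/* Pattern visible in the listing: take rf, alias &rf->sc_dev locally,
	 * configure the CEQ via irq_ops, then enable its interrupt. */
	static void cfg_ceq_vector_sketch(struct irdma_pci_f *rf,
					  struct irdma_msix_vector *msix_vec)
	{
		struct irdma_sc_dev *dev = &rf->sc_dev;

		dev->irq_ops->irdma_cfg_ceq(dev, msix_vec->ceq_id,
					    msix_vec->idx, true);
		irdma_ena_intr(dev, msix_vec->idx);
	}

	int main(void)
	{
		struct irdma_irq_ops ops = { .irdma_cfg_ceq = mock_cfg_ceq };
		struct irdma_msix_vector vec = { .idx = 1, .ceq_id = 0 };
		struct irdma_pci_f rf = { .sc_dev = { .irq_ops = &ops } };

		cfg_ceq_vector_sketch(&rf, &vec);
		rf.sc_dev.ceq_valid = true;	/* mirrors the ceq_valid updates above */
		return 0;
	}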