/kernel/linux/linux-5.10/drivers/iommu/arm/arm-smmu/
H A D | arm-smmu.c
    245 static void arm_smmu_tlb_sync_context(struct arm_smmu_domain *smmu_domain) in arm_smmu_tlb_sync_context() argument
    247 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_tlb_sync_context()
    250 spin_lock_irqsave(&smmu_domain->cb_lock, flags); in arm_smmu_tlb_sync_context()
    251 __arm_smmu_tlb_sync(smmu, ARM_SMMU_CB(smmu, smmu_domain->cfg.cbndx), in arm_smmu_tlb_sync_context()
    253 spin_unlock_irqrestore(&smmu_domain->cb_lock, flags); in arm_smmu_tlb_sync_context()
    258 struct arm_smmu_domain *smmu_domain = cookie; in arm_smmu_tlb_inv_context_s1() local
    264 arm_smmu_cb_write(smmu_domain->smmu, smmu_domain->cfg.cbndx, in arm_smmu_tlb_inv_context_s1()
    265 ARM_SMMU_CB_S1_TLBIASID, smmu_domain->cfg.asid); in arm_smmu_tlb_inv_context_s1()
    266 arm_smmu_tlb_sync_context(smmu_domain); in arm_smmu_tlb_inv_context_s1()
    271 struct arm_smmu_domain *smmu_domain = cookie; arm_smmu_tlb_inv_context_s2() local
    283 struct arm_smmu_domain *smmu_domain = cookie; arm_smmu_tlb_inv_range_s1() local
    311 struct arm_smmu_domain *smmu_domain = cookie; arm_smmu_tlb_inv_range_s2() local
    392 struct arm_smmu_domain *smmu_domain = cookie; arm_smmu_tlb_add_page_s2_v1() local
    427 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_context_fault() local
    480 arm_smmu_init_context_bank(struct arm_smmu_domain *smmu_domain, struct io_pgtable_cfg *pgtbl_cfg) arm_smmu_init_context_bank() argument
    623 arm_smmu_alloc_context_bank(struct arm_smmu_domain *smmu_domain, struct arm_smmu_device *smmu, struct device *dev, unsigned int start) arm_smmu_alloc_context_bank() argument
    642 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_init_domain_context() local
    846 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_destroy_domain_context() local
    878 struct arm_smmu_domain *smmu_domain; arm_smmu_domain_alloc() local
    907 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_domain_free() local
    1114 arm_smmu_domain_add_master(struct arm_smmu_domain *smmu_domain, struct arm_smmu_master_cfg *cfg, struct iommu_fwspec *fwspec) arm_smmu_domain_add_master() argument
    1143 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_attach_dev() local
    1246 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_flush_iotlb_all() local
    1259 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_iotlb_sync() local
    1277 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_iova_to_phys_hard() local
    1329 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_iova_to_phys() local
    1507 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_domain_get_attr() local
    1537 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_domain_set_attr() local
    [all...]
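The hits at lines 245-266 above are the v1/v2 driver's stage-1 "invalidate by ASID, then sync" path. A rough reconstruction from just those fragments follows; the trailing arguments of the __arm_smmu_tlb_sync() call are cut off in the hit list and are filled in here as an assumption, and all other helpers and fields are taken from the fragments themselves.

/* Sketch reconstructed from the fragments above; not a verbatim copy of the file. */
static void arm_smmu_tlb_sync_context(struct arm_smmu_domain *smmu_domain)
{
	struct arm_smmu_device *smmu = smmu_domain->smmu;
	unsigned long flags;

	/* Serialise syncs on this context bank; callers may be in IRQ context. */
	spin_lock_irqsave(&smmu_domain->cb_lock, flags);
	__arm_smmu_tlb_sync(smmu, ARM_SMMU_CB(smmu, smmu_domain->cfg.cbndx),
			    ARM_SMMU_CB_TLBSYNC, ARM_SMMU_CB_TLBSTATUS); /* tail assumed */
	spin_unlock_irqrestore(&smmu_domain->cb_lock, flags);
}

static void arm_smmu_tlb_inv_context_s1(void *cookie)
{
	/* The io-pgtable cookie is the owning domain. */
	struct arm_smmu_domain *smmu_domain = cookie;

	/* Invalidate every TLB entry tagged with this domain's ASID... */
	arm_smmu_cb_write(smmu_domain->smmu, smmu_domain->cfg.cbndx,
			  ARM_SMMU_CB_S1_TLBIASID, smmu_domain->cfg.asid);
	/* ...then wait for the invalidation to complete. */
	arm_smmu_tlb_sync_context(smmu_domain);
}
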
H A D | arm-smmu-impl.c
    71 static int cavium_init_context(struct arm_smmu_domain *smmu_domain, in cavium_init_context() argument
    74 struct cavium_smmu *cs = container_of(smmu_domain->smmu, in cavium_init_context()
    77 if (smmu_domain->stage == ARM_SMMU_DOMAIN_S2) in cavium_init_context()
    78 smmu_domain->cfg.vmid += cs->id_base; in cavium_init_context()
    80 smmu_domain->cfg.asid += cs->id_base; in cavium_init_context()
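The arm-smmu-impl.c hits show the Cavium quirk almost in full: each domain's VMID or ASID is offset by a per-SMMU base so that context IDs from the two on-chip SMMUs do not alias. A sketch with the cut-off pieces filled in; the parameters after smmu_domain, the tail of the container_of() call and the return value are not in the hits and are assumptions.

static int cavium_init_context(struct arm_smmu_domain *smmu_domain,
			       struct io_pgtable_cfg *pgtbl_cfg, struct device *dev)
{
	/* Recover the Cavium-specific wrapper around the generic SMMU device. */
	struct cavium_smmu *cs = container_of(smmu_domain->smmu,
					      struct cavium_smmu, smmu);

	/* Shift this domain's ID into the range reserved for this SMMU instance. */
	if (smmu_domain->stage == ARM_SMMU_DOMAIN_S2)
		smmu_domain->cfg.vmid += cs->id_base;
	else
		smmu_domain->cfg.asid += cs->id_base;

	return 0;
}
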
H A D | arm-smmu-nvidia.c
    204 struct arm_smmu_domain *smmu_domain; in nvidia_smmu_context_fault() local
    206 smmu_domain = container_of(domain, struct arm_smmu_domain, domain); in nvidia_smmu_context_fault()
    207 smmu = smmu_domain->smmu; in nvidia_smmu_context_fault()
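Line 206 is the usual container_of() recovery of the driver-private domain from the generic struct iommu_domain embedded inside it; the to_smmu_domain() helper seen throughout the other hits wraps the same conversion. A minimal sketch of that relationship (the struct layout is abbreviated; only the embedded member name is taken from the hit):

/* The driver-private domain embeds the core IOMMU domain... */
struct arm_smmu_domain {
	/* ... driver-specific state elided ... */
	struct iommu_domain domain;	/* handed out to the IOMMU core */
};

/* ...so a pointer to the embedded member converts back to its container: */
static struct arm_smmu_domain *to_smmu_domain(struct iommu_domain *dom)
{
	return container_of(dom, struct arm_smmu_domain, domain);
}
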
H A D | arm-smmu.h
    429 int (*init_context)(struct arm_smmu_domain *smmu_domain,
    436 int (*alloc_context_bank)(struct arm_smmu_domain *smmu_domain,

/kernel/linux/linux-6.6/drivers/iommu/arm/arm-smmu/
H A D | arm-smmu.c
    226 static void arm_smmu_tlb_sync_context(struct arm_smmu_domain *smmu_domain) in arm_smmu_tlb_sync_context() argument
    228 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_tlb_sync_context()
    231 spin_lock_irqsave(&smmu_domain->cb_lock, flags); in arm_smmu_tlb_sync_context()
    232 __arm_smmu_tlb_sync(smmu, ARM_SMMU_CB(smmu, smmu_domain->cfg.cbndx), in arm_smmu_tlb_sync_context()
    234 spin_unlock_irqrestore(&smmu_domain->cb_lock, flags); in arm_smmu_tlb_sync_context()
    239 struct arm_smmu_domain *smmu_domain = cookie; in arm_smmu_tlb_inv_context_s1() local
    245 arm_smmu_cb_write(smmu_domain->smmu, smmu_domain->cfg.cbndx, in arm_smmu_tlb_inv_context_s1()
    246 ARM_SMMU_CB_S1_TLBIASID, smmu_domain->cfg.asid); in arm_smmu_tlb_inv_context_s1()
    247 arm_smmu_tlb_sync_context(smmu_domain); in arm_smmu_tlb_inv_context_s1()
    252 struct arm_smmu_domain *smmu_domain = cookie; arm_smmu_tlb_inv_context_s2() local
    264 struct arm_smmu_domain *smmu_domain = cookie; arm_smmu_tlb_inv_range_s1() local
    292 struct arm_smmu_domain *smmu_domain = cookie; arm_smmu_tlb_inv_range_s2() local
    312 struct arm_smmu_domain *smmu_domain = cookie; arm_smmu_tlb_inv_walk_s1() local
    364 struct arm_smmu_domain *smmu_domain = cookie; arm_smmu_tlb_add_page_s2_v1() local
    396 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_context_fault() local
    454 arm_smmu_init_context_bank(struct arm_smmu_domain *smmu_domain, struct io_pgtable_cfg *pgtbl_cfg) arm_smmu_init_context_bank() argument
    600 arm_smmu_alloc_context_bank(struct arm_smmu_domain *smmu_domain, struct arm_smmu_device *smmu, struct device *dev, unsigned int start) arm_smmu_alloc_context_bank() argument
    619 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_init_domain_context() local
    823 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_destroy_domain_context() local
    855 struct arm_smmu_domain *smmu_domain; arm_smmu_domain_alloc() local
    878 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_domain_free() local
    1084 arm_smmu_domain_add_master(struct arm_smmu_domain *smmu_domain, struct arm_smmu_master_cfg *cfg, struct iommu_fwspec *fwspec) arm_smmu_domain_add_master() argument
    1113 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_attach_dev() local
    1215 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_flush_iotlb_all() local
    1228 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_iotlb_sync() local
    1246 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_iova_to_phys_hard() local
    1298 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_iova_to_phys() local
    1488 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_enable_nesting() local
    1504 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_set_pgtable_quirks() local
    [all...]
H A D | arm-smmu-qcom.c
    62 struct arm_smmu_domain *smmu_domain = (void *)cookie; in qcom_adreno_smmu_get_fault_info() local
    63 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in qcom_adreno_smmu_get_fault_info()
    64 struct arm_smmu_device *smmu = smmu_domain->smmu; in qcom_adreno_smmu_get_fault_info()
    77 struct arm_smmu_domain *smmu_domain = (void *)cookie; in qcom_adreno_smmu_set_stall() local
    78 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in qcom_adreno_smmu_set_stall()
    79 struct qcom_smmu *qsmmu = to_qcom_smmu(smmu_domain->smmu); in qcom_adreno_smmu_set_stall()
    89 struct arm_smmu_domain *smmu_domain = (void *)cookie; in qcom_adreno_smmu_resume_translation() local
    90 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in qcom_adreno_smmu_resume_translation()
    91 struct arm_smmu_device *smmu = smmu_domain->smmu; in qcom_adreno_smmu_resume_translation()
    124 struct arm_smmu_domain *smmu_domain in qcom_adreno_smmu_get_ttbr1_cfg() local
    139 struct arm_smmu_domain *smmu_domain = (void *)cookie; qcom_adreno_smmu_set_ttbr0_cfg() local
    177 qcom_adreno_smmu_alloc_context_bank(struct arm_smmu_domain *smmu_domain, struct arm_smmu_device *smmu, struct device *dev, int start) qcom_adreno_smmu_alloc_context_bank() argument
    208 qcom_adreno_smmu_init_context(struct arm_smmu_domain *smmu_domain, struct io_pgtable_cfg *pgtbl_cfg, struct device *dev) qcom_adreno_smmu_init_context() argument
    264 qcom_smmu_init_context(struct arm_smmu_domain *smmu_domain, struct io_pgtable_cfg *pgtbl_cfg, struct device *dev) qcom_smmu_init_context() argument
    [all...]
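All of the Adreno callbacks above open the same way: the opaque cookie registered with the GPU driver is the arm_smmu_domain pointer, so each callback casts it back and then indexes the domain's context bank. A minimal sketch of that pattern; qcom_adreno_smmu_read_fsr() is a hypothetical helper, while the cast and the cfg/smmu locals mirror the hits.

static u32 qcom_adreno_smmu_read_fsr(const void *cookie)	/* hypothetical */
{
	/* The cookie handed to the GPU driver is the domain itself. */
	struct arm_smmu_domain *smmu_domain = (void *)cookie;
	struct arm_smmu_cfg *cfg = &smmu_domain->cfg;
	struct arm_smmu_device *smmu = smmu_domain->smmu;

	/* Read the fault status register of this domain's context bank. */
	return arm_smmu_cb_read(smmu, cfg->cbndx, ARM_SMMU_CB_FSR);
}
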
H A D | arm-smmu-impl.c
    71 static int cavium_init_context(struct arm_smmu_domain *smmu_domain, in cavium_init_context() argument
    74 struct cavium_smmu *cs = container_of(smmu_domain->smmu, in cavium_init_context()
    77 if (smmu_domain->stage == ARM_SMMU_DOMAIN_S2) in cavium_init_context()
    78 smmu_domain->cfg.vmid += cs->id_base; in cavium_init_context()
    80 smmu_domain->cfg.asid += cs->id_base; in cavium_init_context()
H A D | arm-smmu-nvidia.c
    225 struct arm_smmu_domain *smmu_domain; in nvidia_smmu_context_fault() local
    228 smmu_domain = container_of(domain, struct arm_smmu_domain, domain); in nvidia_smmu_context_fault()
    229 smmu = smmu_domain->smmu; in nvidia_smmu_context_fault()
    261 static int nvidia_smmu_init_context(struct arm_smmu_domain *smmu_domain, in nvidia_smmu_init_context() argument
    265 struct arm_smmu_device *smmu = smmu_domain->smmu; in nvidia_smmu_init_context()
H A D | arm-smmu.h
    433 int (*init_context)(struct arm_smmu_domain *smmu_domain,
    440 int (*alloc_context_bank)(struct arm_smmu_domain *smmu_domain,
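These two header hits are the per-implementation hooks that the quirk files above (Cavium, NVIDIA, Qualcomm) plug into. A hypothetical implementation wiring them up might look like the sketch below; the full parameter lists are completed from the qcom/cavium hits, the example_* names are invented, and struct arm_smmu_impl with exactly these member names is assumed from the driver.

static int example_init_context(struct arm_smmu_domain *smmu_domain,
				struct io_pgtable_cfg *pgtbl_cfg, struct device *dev)
{
	/* Adjust smmu_domain->cfg or pgtbl_cfg before the context bank is programmed. */
	return 0;
}

static int example_alloc_context_bank(struct arm_smmu_domain *smmu_domain,
				      struct arm_smmu_device *smmu,
				      struct device *dev, int start)
{
	/* Default policy: grab the first free context bank at or after 'start'. */
	return __arm_smmu_alloc_bitmap(smmu->context_map, start,
				       smmu->num_context_banks);
}

static const struct arm_smmu_impl example_impl = {
	.init_context		= example_init_context,
	.alloc_context_bank	= example_alloc_context_bank,
};
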
/kernel/linux/linux-6.6/drivers/iommu/arm/arm-smmu-v3/
H A D | arm-smmu-v3-sva.c
    51 struct arm_smmu_domain *smmu_domain; in arm_smmu_share_asid() local
    65 smmu_domain = container_of(cd, struct arm_smmu_domain, s1_cfg.cd); in arm_smmu_share_asid()
    66 smmu = smmu_domain->smmu; in arm_smmu_share_asid()
    83 arm_smmu_write_ctx_desc(smmu_domain, IOMMU_NO_PASID, cd); in arm_smmu_share_asid()
    204 struct arm_smmu_domain *smmu_domain = smmu_mn->domain; in arm_smmu_mm_arch_invalidate_secondary_tlbs() local
    213 if (!(smmu_domain->smmu->features & ARM_SMMU_FEAT_RANGE_INV)) { in arm_smmu_mm_arch_invalidate_secondary_tlbs()
    221 if (!(smmu_domain->smmu->features & ARM_SMMU_FEAT_BTM)) { in arm_smmu_mm_arch_invalidate_secondary_tlbs()
    223 arm_smmu_tlb_inv_asid(smmu_domain->smmu, in arm_smmu_mm_arch_invalidate_secondary_tlbs()
    229 smmu_domain); in arm_smmu_mm_arch_invalidate_secondary_tlbs()
    232 arm_smmu_atc_inv_domain(smmu_domain, m in arm_smmu_mm_arch_invalidate_secondary_tlbs()
    238 struct arm_smmu_domain *smmu_domain = smmu_mn->domain; arm_smmu_mm_release() local
    272 arm_smmu_mmu_notifier_get(struct arm_smmu_domain *smmu_domain, struct mm_struct *mm) arm_smmu_mmu_notifier_get() argument
    326 struct arm_smmu_domain *smmu_domain = smmu_mn->domain; arm_smmu_mmu_notifier_put() local
    355 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); __arm_smmu_sva_bind() local
    [all...]
H A D | arm-smmu-v3.c
    974 static void arm_smmu_sync_cd(struct arm_smmu_domain *smmu_domain, in arm_smmu_sync_cd() argument
    981 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_sync_cd()
    992 spin_lock_irqsave(&smmu_domain->devices_lock, flags); in arm_smmu_sync_cd()
    993 list_for_each_entry(master, &smmu_domain->devices, domain_head) { in arm_smmu_sync_cd()
    999 spin_unlock_irqrestore(&smmu_domain->devices_lock, flags); in arm_smmu_sync_cd()
    1029 static __le64 *arm_smmu_get_cd_ptr(struct arm_smmu_domain *smmu_domain, in arm_smmu_get_cd_ptr() argument
    1035 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_get_cd_ptr()
    1036 struct arm_smmu_ctx_desc_cfg *cdcfg = &smmu_domain->s1_cfg.cdcfg; in arm_smmu_get_cd_ptr()
    1038 if (smmu_domain->s1_cfg.s1fmt == STRTAB_STE_0_S1FMT_LINEAR) in arm_smmu_get_cd_ptr()
    1050 arm_smmu_sync_cd(smmu_domain, ssi in arm_smmu_get_cd_ptr()
    1056 arm_smmu_write_ctx_desc(struct arm_smmu_domain *smmu_domain, int ssid, struct arm_smmu_ctx_desc *cd) arm_smmu_write_ctx_desc() argument
    1135 arm_smmu_alloc_cd_tables(struct arm_smmu_domain *smmu_domain) arm_smmu_alloc_cd_tables() argument
    1184 arm_smmu_free_cd_tables(struct arm_smmu_domain *smmu_domain) arm_smmu_free_cd_tables() argument
    1281 struct arm_smmu_domain *smmu_domain = NULL; arm_smmu_write_strtab_ent() local
    1809 arm_smmu_atc_inv_domain(struct arm_smmu_domain *smmu_domain, int ssid, unsigned long iova, size_t size) arm_smmu_atc_inv_domain() argument
    1860 struct arm_smmu_domain *smmu_domain = cookie; arm_smmu_tlb_inv_context() local
    1881 __arm_smmu_tlb_inv_range(struct arm_smmu_cmdq_ent *cmd, unsigned long iova, size_t size, size_t granule, struct arm_smmu_domain *smmu_domain) __arm_smmu_tlb_inv_range() argument
    1952 arm_smmu_tlb_inv_range_domain(unsigned long iova, size_t size, size_t granule, bool leaf, struct arm_smmu_domain *smmu_domain) arm_smmu_tlb_inv_range_domain() argument
    1979 arm_smmu_tlb_inv_range_asid(unsigned long iova, size_t size, int asid, size_t granule, bool leaf, struct arm_smmu_domain *smmu_domain) arm_smmu_tlb_inv_range_asid() argument
    1999 struct arm_smmu_domain *smmu_domain = cookie; arm_smmu_tlb_inv_page_nosync() local
    2036 struct arm_smmu_domain *smmu_domain; arm_smmu_domain_alloc() local
    2065 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_domain_free() local
    2089 arm_smmu_domain_finalise_s1(struct arm_smmu_domain *smmu_domain, struct arm_smmu_master *master, struct io_pgtable_cfg *pgtbl_cfg) arm_smmu_domain_finalise_s1() argument
    2148 arm_smmu_domain_finalise_s2(struct arm_smmu_domain *smmu_domain, struct arm_smmu_master *master, struct io_pgtable_cfg *pgtbl_cfg) arm_smmu_domain_finalise_s2() argument
    2187 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_domain_finalise() local
    2309 struct arm_smmu_domain *smmu_domain = master->domain; arm_smmu_enable_ats() local
    2327 struct arm_smmu_domain *smmu_domain = master->domain; arm_smmu_disable_ats() local
    2392 struct arm_smmu_domain *smmu_domain = master->domain; arm_smmu_detach_dev() local
    2414 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_attach_dev() local
    2498 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_unmap_pages() local
    2509 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_flush_iotlb_all() local
    2518 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_iotlb_sync() local
    2745 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_enable_nesting() local
    [all...]
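The arm_smmu_sync_cd() hits (lines 974-999) show how a context-descriptor change is published: every master attached to the domain is walked under devices_lock and an invalidation is queued for each of its stream IDs. A loose sketch of that loop; queue_cd_invalidation() stands in for the driver's real command-batch helpers, and the num_streams/streams fields are assumed since they do not appear in the hits.

static void sync_cd_to_all_masters(struct arm_smmu_domain *smmu_domain, int ssid)
{
	struct arm_smmu_device *smmu = smmu_domain->smmu;
	struct arm_smmu_master *master;
	unsigned long flags;
	int i;

	/* The domain keeps a list of attached masters; walk it atomically. */
	spin_lock_irqsave(&smmu_domain->devices_lock, flags);
	list_for_each_entry(master, &smmu_domain->devices, domain_head) {
		/* Queue one CFGI_CD invalidation per stream ID owned by this master. */
		for (i = 0; i < master->num_streams; i++)
			queue_cd_invalidation(smmu, master->streams[i].id, ssid);
	}
	spin_unlock_irqrestore(&smmu_domain->devices_lock, flags);
}
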
H A D | arm-smmu-v3.h
    748 int arm_smmu_write_ctx_desc(struct arm_smmu_domain *smmu_domain, int ssid,
    753 struct arm_smmu_domain *smmu_domain);
    755 int arm_smmu_atc_inv_domain(struct arm_smmu_domain *smmu_domain, int ssid,

/kernel/linux/linux-5.10/drivers/iommu/arm/arm-smmu-v3/
H A D | arm-smmu-v3.c
    908 static void arm_smmu_sync_cd(struct arm_smmu_domain *smmu_domain, in arm_smmu_sync_cd() argument
    915 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_sync_cd()
    924 spin_lock_irqsave(&smmu_domain->devices_lock, flags); in arm_smmu_sync_cd()
    925 list_for_each_entry(master, &smmu_domain->devices, domain_head) { in arm_smmu_sync_cd()
    931 spin_unlock_irqrestore(&smmu_domain->devices_lock, flags); in arm_smmu_sync_cd()
    961 static __le64 *arm_smmu_get_cd_ptr(struct arm_smmu_domain *smmu_domain, in arm_smmu_get_cd_ptr() argument
    967 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_get_cd_ptr()
    968 struct arm_smmu_ctx_desc_cfg *cdcfg = &smmu_domain->s1_cfg.cdcfg; in arm_smmu_get_cd_ptr()
    970 if (smmu_domain->s1_cfg.s1fmt == STRTAB_STE_0_S1FMT_LINEAR) in arm_smmu_get_cd_ptr()
    982 arm_smmu_sync_cd(smmu_domain, ssi in arm_smmu_get_cd_ptr()
    988 arm_smmu_write_ctx_desc(struct arm_smmu_domain *smmu_domain, int ssid, struct arm_smmu_ctx_desc *cd) arm_smmu_write_ctx_desc() argument
    1065 arm_smmu_alloc_cd_tables(struct arm_smmu_domain *smmu_domain) arm_smmu_alloc_cd_tables() argument
    1114 arm_smmu_free_cd_tables(struct arm_smmu_domain *smmu_domain) arm_smmu_free_cd_tables() argument
    1212 struct arm_smmu_domain *smmu_domain = NULL; arm_smmu_write_strtab_ent() local
    1590 arm_smmu_atc_inv_domain(struct arm_smmu_domain *smmu_domain, int ssid, unsigned long iova, size_t size) arm_smmu_atc_inv_domain() argument
    1639 struct arm_smmu_domain *smmu_domain = cookie; arm_smmu_tlb_inv_context() local
    1661 arm_smmu_tlb_inv_range(unsigned long iova, size_t size, size_t granule, bool leaf, struct arm_smmu_domain *smmu_domain) arm_smmu_tlb_inv_range() argument
    1742 struct arm_smmu_domain *smmu_domain = cookie; arm_smmu_tlb_inv_page_nosync() local
    1782 struct arm_smmu_domain *smmu_domain; arm_smmu_domain_alloc() local
    1831 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_domain_free() local
    1856 arm_smmu_domain_finalise_s1(struct arm_smmu_domain *smmu_domain, struct arm_smmu_master *master, struct io_pgtable_cfg *pgtbl_cfg) arm_smmu_domain_finalise_s1() argument
    1913 arm_smmu_domain_finalise_s2(struct arm_smmu_domain *smmu_domain, struct arm_smmu_master *master, struct io_pgtable_cfg *pgtbl_cfg) arm_smmu_domain_finalise_s2() argument
    1950 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_domain_finalise() local
    2075 struct arm_smmu_domain *smmu_domain = master->domain; arm_smmu_enable_ats() local
    2093 struct arm_smmu_domain *smmu_domain = master->domain; arm_smmu_disable_ats() local
    2158 struct arm_smmu_domain *smmu_domain = master->domain; arm_smmu_detach_dev() local
    2180 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_attach_dev() local
    2258 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_unmap() local
    2269 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_flush_iotlb_all() local
    2278 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_iotlb_sync() local
    2426 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_domain_get_attr() local
    2456 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); arm_smmu_domain_set_attr() local
    [all...]
H A D | arm-smmu-v3-sva.c
    26 struct arm_smmu_domain *smmu_domain; in arm_smmu_share_asid() local
    40 smmu_domain = container_of(cd, struct arm_smmu_domain, s1_cfg.cd); in arm_smmu_share_asid()
    41 smmu = smmu_domain->smmu; in arm_smmu_share_asid()
    58 arm_smmu_write_ctx_desc(smmu_domain, 0, cd); in arm_smmu_share_asid()
H A D | arm-smmu-v3.h
    686 int arm_smmu_write_ctx_desc(struct arm_smmu_domain *smmu_domain, int ssid,