Lines matching refs: rk_domain (drivers/iommu/rockchip-iommu.c, the Rockchip IOMMU driver; the leading number on each line is the source line in that file)
652 struct rk_iommu_domain *rk_domain = to_rk_domain(domain);
658 spin_lock_irqsave(&rk_domain->dt_lock, flags);
660 dte = rk_domain->dt[rk_iova_dte_index(iova)];
672 spin_unlock_irqrestore(&rk_domain->dt_lock, flags);
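Lines 652-672 are the address-translation path, rk_iommu_iova_to_phys(): a software walk of the two-level page table under dt_lock. Below is a condensed sketch reconstructed around the listed lines from the mainline driver; everything outside the listed lines (the rk_dte_is_pt_valid/rk_pte_is_page_valid/rk_iova_*_index helpers and the rk_ops indirection) is assumed from mainline and may differ in the exact revision this listing came from. The same caveat applies to all sketches below.

static phys_addr_t rk_iommu_iova_to_phys(struct iommu_domain *domain,
                                         dma_addr_t iova)
{
        struct rk_iommu_domain *rk_domain = to_rk_domain(domain);
        unsigned long flags;
        phys_addr_t pt_phys, phys = 0;
        u32 dte, pte;
        u32 *page_table;

        spin_lock_irqsave(&rk_domain->dt_lock, flags);

        dte = rk_domain->dt[rk_iova_dte_index(iova)];   /* level-1 lookup */
        if (!rk_dte_is_pt_valid(dte))
                goto out;

        pt_phys = rk_ops->pt_address(dte);
        page_table = (u32 *)phys_to_virt(pt_phys);
        pte = page_table[rk_iova_pte_index(iova)];      /* level-2 lookup */
        if (!rk_pte_is_page_valid(pte))
                goto out;

        phys = rk_ops->pt_address(pte) + rk_iova_page_offset(iova);
out:
        spin_unlock_irqrestore(&rk_domain->dt_lock, flags);
        return phys;
}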
677 static void rk_iommu_zap_iova(struct rk_iommu_domain *rk_domain,
684 spin_lock_irqsave(&rk_domain->iommus_lock, flags);
685 list_for_each(pos, &rk_domain->iommus) {
703 spin_unlock_irqrestore(&rk_domain->iommus_lock, flags);
706 static void rk_iommu_zap_iova_first_last(struct rk_iommu_domain *rk_domain,
709 rk_iommu_zap_iova(rk_domain, iova, SPAGE_SIZE);
711 rk_iommu_zap_iova(rk_domain, iova + size - SPAGE_SIZE,
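Lines 677-711 cover the IOTLB shootdown helpers. rk_iommu_zap_iova() walks rk_domain->iommus under iommus_lock and zaps the range in every attached, powered-on IOMMU; rk_iommu_zap_iova_first_last() trims that to the first and last 4 KiB page of a range. A sketch under the same mainline assumptions (the pm_runtime and clk_bulk handling is abbreviated, and rk_iommu_zap_lines() is the per-instance register-level zap from that file):

static void rk_iommu_zap_iova(struct rk_iommu_domain *rk_domain,
                              dma_addr_t iova, size_t size)
{
        struct list_head *pos;
        unsigned long flags;

        /* Shoot down this iova range in every IOMMU using the domain. */
        spin_lock_irqsave(&rk_domain->iommus_lock, flags);
        list_for_each(pos, &rk_domain->iommus) {
                struct rk_iommu *iommu = list_entry(pos, struct rk_iommu, node);
                int ret = pm_runtime_get_if_in_use(iommu->dev);

                if (WARN_ON_ONCE(ret < 0))
                        continue;
                if (ret) {      /* only zap IOMMUs that are powered on */
                        WARN_ON(clk_bulk_enable(iommu->num_clocks,
                                                iommu->clocks));
                        rk_iommu_zap_lines(iommu, iova, size);
                        clk_bulk_disable(iommu->num_clocks, iommu->clocks);
                        pm_runtime_put(iommu->dev);
                }
        }
        spin_unlock_irqrestore(&rk_domain->iommus_lock, flags);
}

static void rk_iommu_zap_iova_first_last(struct rk_iommu_domain *rk_domain,
                                         dma_addr_t iova, size_t size)
{
        /*
         * Only the edges of a newly mapped range can share a dte/pte
         * cacheline with an existing mapping, so zapping the first and
         * last page is sufficient.
         */
        rk_iommu_zap_iova(rk_domain, iova, SPAGE_SIZE);
        if (size > SPAGE_SIZE)
                rk_iommu_zap_iova(rk_domain, iova + size - SPAGE_SIZE,
                                  SPAGE_SIZE);
}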
715 static u32 *rk_dte_get_page_table(struct rk_iommu_domain *rk_domain,
723 assert_spin_locked(&rk_domain->dt_lock);
726 dte_addr = &rk_domain->dt[dte_index];
745 rk_table_flush(rk_domain,
746 rk_domain->dt_dma + dte_index * sizeof(u32), 1);
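Lines 715-746 are rk_dte_get_page_table(), which allocates a level-2 table on demand. The caller must already hold dt_lock (hence the assert at 723), and a freshly installed dte is flushed to the device at 745-746 by offsetting dt_dma by dte_index entries. Sketch, again reconstructed from mainline (dma_dev and rk_ops are file-scope state there):

static u32 *rk_dte_get_page_table(struct rk_iommu_domain *rk_domain,
                                  dma_addr_t iova)
{
        u32 *page_table, *dte_addr;
        u32 dte_index;
        dma_addr_t pt_dma;

        assert_spin_locked(&rk_domain->dt_lock);

        dte_index = rk_iova_dte_index(iova);
        dte_addr = &rk_domain->dt[dte_index];
        if (rk_dte_is_pt_valid(*dte_addr))
                goto done;

        /* Lazily allocate one 4 KiB level-2 table (GFP_ATOMIC: under lock). */
        page_table = (u32 *)get_zeroed_page(GFP_ATOMIC | rk_ops->gfp_flags);
        if (!page_table)
                return ERR_PTR(-ENOMEM);

        pt_dma = dma_map_single(dma_dev, page_table, SPAGE_SIZE,
                                DMA_TO_DEVICE);
        if (dma_mapping_error(dma_dev, pt_dma)) {
                free_page((unsigned long)page_table);
                return ERR_PTR(-ENOMEM);
        }

        *dte_addr = rk_ops->mk_dtentries(pt_dma);
        /* Push the single new dte out to the IOMMU-visible copy. */
        rk_table_flush(rk_domain,
                       rk_domain->dt_dma + dte_index * sizeof(u32), 1);
done:
        return (u32 *)phys_to_virt(rk_ops->pt_address(*dte_addr));
}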
752 static size_t rk_iommu_unmap_iova(struct rk_iommu_domain *rk_domain,
759 assert_spin_locked(&rk_domain->dt_lock);
769 rk_table_flush(rk_domain, pte_dma, pte_count);
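Lines 752-769 are rk_iommu_unmap_iova(), the pte-clearing inner loop: it invalidates up to size/SPAGE_SIZE consecutive ptes, stops at the first already-invalid one, flushes only the touched range starting at pte_dma, and reports how many bytes it actually unmapped. Sketch:

static size_t rk_iommu_unmap_iova(struct rk_iommu_domain *rk_domain,
                                  u32 *pte_addr, dma_addr_t pte_dma,
                                  size_t size)
{
        unsigned int pte_count;
        unsigned int pte_total = size / SPAGE_SIZE;

        assert_spin_locked(&rk_domain->dt_lock);

        for (pte_count = 0; pte_count < pte_total; pte_count++) {
                u32 pte = pte_addr[pte_count];

                if (!rk_pte_is_page_valid(pte))
                        break;  /* hole: nothing further was mapped */

                pte_addr[pte_count] = rk_mk_pte_invalid(pte);
        }

        rk_table_flush(rk_domain, pte_dma, pte_count);

        return pte_count * SPAGE_SIZE;  /* bytes actually unmapped */
}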
774 static int rk_iommu_map_iova(struct rk_iommu_domain *rk_domain, u32 *pte_addr,
782 assert_spin_locked(&rk_domain->dt_lock);
795 rk_table_flush(rk_domain, pte_dma, pte_total);
803 rk_iommu_zap_iova_first_last(rk_domain, iova, size);
808 rk_iommu_unmap_iova(rk_domain, pte_addr, pte_dma,
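Lines 774-808 are rk_iommu_map_iova(), the counterpart loop that installs ptes. On a collision with an already-valid pte it unwinds what it installed via rk_iommu_unmap_iova() (line 808); on success it flushes the new ptes and zaps only the first and last page of the range (line 803), since only the edges can alias stale IOTLB lines. A sketch (mainline additionally logs the colliding mapping before returning):

static int rk_iommu_map_iova(struct rk_iommu_domain *rk_domain, u32 *pte_addr,
                             dma_addr_t pte_dma, dma_addr_t iova,
                             phys_addr_t paddr, size_t size, int prot)
{
        unsigned int pte_count;
        unsigned int pte_total = size / SPAGE_SIZE;

        assert_spin_locked(&rk_domain->dt_lock);

        for (pte_count = 0; pte_count < pte_total; pte_count++) {
                if (rk_pte_is_page_valid(pte_addr[pte_count]))
                        goto unwind;    /* overlaps an existing mapping */

                pte_addr[pte_count] = rk_ops->mk_ptentries(paddr, prot);
                paddr += SPAGE_SIZE;
        }

        rk_table_flush(rk_domain, pte_dma, pte_total);

        /* Evict possibly stale IOTLB lines at the edges of the range. */
        rk_iommu_zap_iova_first_last(rk_domain, iova, size);

        return 0;

unwind:
        /* Roll back the ptes installed so far. */
        rk_iommu_unmap_iova(rk_domain, pte_addr, pte_dma,
                            pte_count * SPAGE_SIZE);
        return -EADDRINUSE;
}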
822 struct rk_iommu_domain *rk_domain = to_rk_domain(domain);
829 spin_lock_irqsave(&rk_domain->dt_lock, flags);
838 page_table = rk_dte_get_page_table(rk_domain, iova);
840 spin_unlock_irqrestore(&rk_domain->dt_lock, flags);
844 dte_index = rk_domain->dt[rk_iova_dte_index(iova)];
849 ret = rk_iommu_map_iova(rk_domain, pte_addr, pte_dma, iova,
852 spin_unlock_irqrestore(&rk_domain->dt_lock, flags);
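Lines 822-852 are the iommu_ops .map entry point. It takes dt_lock once, gets (or lazily creates) the level-2 table for the iova, derives pte_addr and pte_dma from the dte, and delegates to rk_iommu_map_iova(). Note that line 844 reads the raw dte value into a variable that mainline names dte_index; the sketch below calls it dte for clarity. The exact signature varies across kernel versions:

static int rk_iommu_map(struct iommu_domain *domain, unsigned long _iova,
                        phys_addr_t paddr, size_t size, int prot, gfp_t gfp)
{
        struct rk_iommu_domain *rk_domain = to_rk_domain(domain);
        unsigned long flags;
        dma_addr_t pte_dma, iova = (dma_addr_t)_iova;
        u32 *page_table, *pte_addr;
        u32 dte, pte_index;
        int ret;

        spin_lock_irqsave(&rk_domain->dt_lock, flags);

        /*
         * iommu_map() guarantees alignment, and pgsize_bitmap limits one
         * call to a single level-2 table, so one dte covers the range.
         */
        page_table = rk_dte_get_page_table(rk_domain, iova);
        if (IS_ERR(page_table)) {
                spin_unlock_irqrestore(&rk_domain->dt_lock, flags);
                return PTR_ERR(page_table);
        }

        dte = rk_domain->dt[rk_iova_dte_index(iova)];
        pte_index = rk_iova_pte_index(iova);
        pte_addr = &page_table[pte_index];
        pte_dma = rk_ops->pt_address(dte) + pte_index * sizeof(u32);
        ret = rk_iommu_map_iova(rk_domain, pte_addr, pte_dma, iova,
                                paddr, size, prot);

        spin_unlock_irqrestore(&rk_domain->dt_lock, flags);
        return ret;
}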
860 struct rk_iommu_domain *rk_domain = to_rk_domain(domain);
868 spin_lock_irqsave(&rk_domain->dt_lock, flags);
877 dte = rk_domain->dt[rk_iova_dte_index(iova)];
880 spin_unlock_irqrestore(&rk_domain->dt_lock, flags);
887 unmap_size = rk_iommu_unmap_iova(rk_domain, pte_addr, pte_dma, size);
889 spin_unlock_irqrestore(&rk_domain->dt_lock, flags);
892 rk_iommu_zap_iova(rk_domain, iova, unmap_size);
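Lines 860-892 are the .unmap counterpart: under dt_lock it resolves the dte (877), returns 0 if the iova was never mapped (880), and clears the ptes via rk_iommu_unmap_iova() (887). The lock is dropped before the shootdown at 892, which takes iommus_lock instead. Sketch:

static size_t rk_iommu_unmap(struct iommu_domain *domain, unsigned long _iova,
                             size_t size, struct iommu_iotlb_gather *gather)
{
        struct rk_iommu_domain *rk_domain = to_rk_domain(domain);
        unsigned long flags;
        dma_addr_t pte_dma, iova = (dma_addr_t)_iova;
        phys_addr_t pt_phys;
        size_t unmap_size;
        u32 dte, *pte_addr;

        spin_lock_irqsave(&rk_domain->dt_lock, flags);

        dte = rk_domain->dt[rk_iova_dte_index(iova)];
        if (!rk_dte_is_pt_valid(dte)) {
                /* Nothing mapped here; report zero bytes unmapped. */
                spin_unlock_irqrestore(&rk_domain->dt_lock, flags);
                return 0;
        }

        pt_phys = rk_ops->pt_address(dte);
        pte_addr = (u32 *)phys_to_virt(pt_phys) + rk_iova_pte_index(iova);
        pte_dma = pt_phys + rk_iova_pte_index(iova) * sizeof(u32);
        unmap_size = rk_iommu_unmap_iova(rk_domain, pte_addr, pte_dma, size);

        spin_unlock_irqrestore(&rk_domain->dt_lock, flags);

        /* Shoot down IOTLB entries for the range, outside of dt_lock. */
        rk_iommu_zap_iova(rk_domain, iova, unmap_size);

        return unmap_size;
}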
925 struct rk_iommu_domain *rk_domain = to_rk_domain(domain);
942 rk_ops->mk_dtentries(rk_domain->dt_dma));
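Lines 925 and 942 sit in the hardware-enable path (rk_iommu_enable() in mainline): after reset, each MMU instance is pointed at the domain's directory table by programming dt_dma, encoded by the SoC-specific rk_ops->mk_dtentries(), into RK_MMU_DTE_ADDR. A trimmed sketch of that per-instance loop (clock, stall, and paging-enable handling omitted):

        for (i = 0; i < iommu->num_mmu; i++) {
                /* Point this MMU instance at the domain's directory table. */
                rk_iommu_write(iommu->bases[i], RK_MMU_DTE_ADDR,
                               rk_ops->mk_dtentries(rk_domain->dt_dma));
                rk_iommu_write(iommu->bases[i], RK_MMU_COMMAND,
                               RK_MMU_CMD_ZAP_CACHE);
                rk_iommu_write(iommu->bases[i], RK_MMU_INT_MASK,
                               RK_MMU_IRQ_MASK);
        }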
960 struct rk_iommu_domain *rk_domain;
969 rk_domain = to_rk_domain(iommu->domain);
978 spin_lock_irqsave(&rk_domain->iommus_lock, flags);
980 spin_unlock_irqrestore(&rk_domain->iommus_lock, flags);
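Lines 960-980 are the detach side (rk_iommu_identity_attach() in recent mainline, rk_iommu_detach_device() in older trees; which one this listing shows is not determinable from these lines alone). The iommu's current domain is resolved at 969 and the iommu is unlinked from the domain's list under iommus_lock (978-980), so it stops receiving shootdowns before the hardware is disabled. Sketch of the core list manipulation:

        rk_domain = to_rk_domain(iommu->domain);
        iommu->domain = identity_domain;

        spin_lock_irqsave(&rk_domain->iommus_lock, flags);
        list_del_init(&iommu->node);    /* stop receiving zap requests */
        spin_unlock_irqrestore(&rk_domain->iommus_lock, flags);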
1017 struct rk_iommu_domain *rk_domain = to_rk_domain(domain);
1041 spin_lock_irqsave(&rk_domain->iommus_lock, flags);
1042 list_add_tail(&iommu->node, &rk_domain->iommus);
1043 spin_unlock_irqrestore(&rk_domain->iommus_lock, flags);
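Lines 1017-1043 are rk_iommu_attach_device(): once the device's iommu is switched to the new domain, it is linked into rk_domain->iommus under iommus_lock (1041-1043) so that subsequent map/unmap shootdowns reach it. A reduced sketch; mainline additionally detaches from the previous domain first and, if the iommu is runtime-active, programs the hardware via rk_iommu_enable():

static int rk_iommu_attach_device(struct iommu_domain *domain,
                                  struct device *dev)
{
        struct rk_iommu_domain *rk_domain = to_rk_domain(domain);
        struct rk_iommu *iommu = rk_iommu_from_dev(dev);
        unsigned long flags;

        if (!iommu)     /* 'virtual devices' (e.g. drm) have no iommu */
                return 0;
        if (iommu->domain == domain)    /* already attached */
                return 0;

        iommu->domain = domain;

        spin_lock_irqsave(&rk_domain->iommus_lock, flags);
        list_add_tail(&iommu->node, &rk_domain->iommus);
        spin_unlock_irqrestore(&rk_domain->iommus_lock, flags);

        return 0;
}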
1060 struct rk_iommu_domain *rk_domain;
1071 rk_domain = kzalloc(sizeof(*rk_domain), GFP_KERNEL);
1072 if (!rk_domain)
1080 rk_domain->dt = (u32 *)get_zeroed_page(GFP_KERNEL | rk_ops->gfp_flags);
1081 if (!rk_domain->dt)
1084 rk_domain->dt_dma = dma_map_single(dma_dev, rk_domain->dt,
1086 if (dma_mapping_error(dma_dev, rk_domain->dt_dma)) {
1091 spin_lock_init(&rk_domain->iommus_lock);
1092 spin_lock_init(&rk_domain->dt_lock);
1093 INIT_LIST_HEAD(&rk_domain->iommus);
1095 rk_domain->domain.geometry.aperture_start = 0;
1096 rk_domain->domain.geometry.aperture_end = DMA_BIT_MASK(32);
1097 rk_domain->domain.geometry.force_aperture = true;
1099 return &rk_domain->domain;
1102 free_page((unsigned long)rk_domain->dt);
1104 kfree(rk_domain);
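Lines 1060-1104 are domain allocation: a zeroed rk_iommu_domain, one zeroed 4 KiB page for the 1024-entry directory table (1080), a streaming DMA mapping of it so the IOMMU can walk it (1084-1086), lock and list initialization (1091-1093), and a fixed 32-bit aperture (1095-1097), with unwind labels at 1102-1104. Sketch (type checks and the newer domain_alloc_paging signature omitted):

static struct iommu_domain *rk_iommu_domain_alloc(unsigned int type)
{
        struct rk_iommu_domain *rk_domain;

        rk_domain = kzalloc(sizeof(*rk_domain), GFP_KERNEL);
        if (!rk_domain)
                return NULL;

        /* One 4 KiB page holds the 1024-entry level-1 directory table. */
        rk_domain->dt = (u32 *)get_zeroed_page(GFP_KERNEL | rk_ops->gfp_flags);
        if (!rk_domain->dt)
                goto err_free_domain;

        rk_domain->dt_dma = dma_map_single(dma_dev, rk_domain->dt,
                                           SPAGE_SIZE, DMA_TO_DEVICE);
        if (dma_mapping_error(dma_dev, rk_domain->dt_dma))
                goto err_free_dt;

        spin_lock_init(&rk_domain->iommus_lock);
        spin_lock_init(&rk_domain->dt_lock);
        INIT_LIST_HEAD(&rk_domain->iommus);

        rk_domain->domain.geometry.aperture_start = 0;
        rk_domain->domain.geometry.aperture_end = DMA_BIT_MASK(32);
        rk_domain->domain.geometry.force_aperture = true;

        return &rk_domain->domain;

err_free_dt:
        free_page((unsigned long)rk_domain->dt);
err_free_domain:
        kfree(rk_domain);
        return NULL;
}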
1111 struct rk_iommu_domain *rk_domain = to_rk_domain(domain);
1114 WARN_ON(!list_empty(&rk_domain->iommus));
1117 u32 dte = rk_domain->dt[i];
1127 dma_unmap_single(dma_dev, rk_domain->dt_dma,
1129 free_page((unsigned long)rk_domain->dt);
1131 kfree(rk_domain);
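Lines 1111-1131 are the teardown: after checking that no iommu is still attached (1114), every valid dte's level-2 table is DMA-unmapped and freed, then the directory table itself is unmapped (1127), freed (1129), and the domain struct released (1131). Sketch:

static void rk_iommu_domain_free(struct iommu_domain *domain)
{
        struct rk_iommu_domain *rk_domain = to_rk_domain(domain);
        int i;

        WARN_ON(!list_empty(&rk_domain->iommus));

        for (i = 0; i < NUM_DT_ENTRIES; i++) {
                u32 dte = rk_domain->dt[i];

                if (rk_dte_is_pt_valid(dte)) {
                        phys_addr_t pt_phys = rk_ops->pt_address(dte);
                        u32 *page_table = phys_to_virt(pt_phys);

                        dma_unmap_single(dma_dev, pt_phys,
                                         SPAGE_SIZE, DMA_TO_DEVICE);
                        free_page((unsigned long)page_table);
                }
        }

        dma_unmap_single(dma_dev, rk_domain->dt_dma,
                         SPAGE_SIZE, DMA_TO_DEVICE);
        free_page((unsigned long)rk_domain->dt);
        kfree(rk_domain);
}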