Lines Matching refs:sent (references to sent, the pointer to a first-level "section" page-table entry):
43 #define lv1ent_fault(sent) ((*(sent) == ZERO_LV2LINK) || \
44 ((*(sent) & 3) == 0) || ((*(sent) & 3) == 3))
45 #define lv1ent_zero(sent) (*(sent) == ZERO_LV2LINK)
46 #define lv1ent_page_zero(sent) ((*(sent) & 3) == 1)
47 #define lv1ent_page(sent) ((*(sent) != ZERO_LV2LINK) && \
48 ((*(sent) & 3) == 1))
49 #define lv1ent_section(sent) ((*(sent) & 3) == 2)
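
Taken together, the lv1ent_* macros (lines 43-49) classify a first-level ("section") entry by its low two type bits: 0b00 and 0b11 are fault encodings, 0b01 is a link to a second-level table (ZERO_LV2LINK, the link to a shared all-zero table, is additionally treated as fault), and 0b10 is a section mapping. Below is a minimal user-space sketch of that classification; the typedef and the ZERO_LV2LINK value are placeholders, since the real link is built at runtime from the zero table's physical address.

#include <stdint.h>
#include <stdio.h>

typedef uint32_t sysmmu_pte_t;

/* Placeholder: in the driver, ZERO_LV2LINK is the physical address of a
 * shared all-zero lv2 table ORed with the "page table" type bits (0b01). */
#define ZERO_LV2LINK ((sysmmu_pte_t)0x40000001u)

#define lv1ent_fault(sent)     ((*(sent) == ZERO_LV2LINK) || \
				((*(sent) & 3) == 0) || ((*(sent) & 3) == 3))
#define lv1ent_zero(sent)      (*(sent) == ZERO_LV2LINK)
#define lv1ent_page_zero(sent) ((*(sent) & 3) == 1)
#define lv1ent_page(sent)      ((*(sent) != ZERO_LV2LINK) && \
				((*(sent) & 3) == 1))
#define lv1ent_section(sent)   ((*(sent) & 3) == 2)

int main(void)
{
	sysmmu_pte_t examples[] = {
		0x00000000u,   /* type bits 0b00: fault                     */
		ZERO_LV2LINK,  /* link to the shared zero table: also fault */
		0x7f000001u,   /* type bits 0b01: link to a real lv2 table  */
		0x40100002u,   /* type bits 0b10: section mapping           */
	};

	for (unsigned int i = 0; i < sizeof(examples) / sizeof(examples[0]); i++) {
		sysmmu_pte_t *sent = &examples[i];

		printf("%#010x: fault=%d zero=%d page_zero=%d page=%d section=%d\n",
		       *sent, lv1ent_fault(sent), lv1ent_zero(sent),
		       lv1ent_page_zero(sent), lv1ent_page(sent),
		       lv1ent_section(sent));
	}
	return 0;
}
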
98 #define section_phys(sent) (sect_to_phys(*(sent)) & SECT_MASK)
122 #define lv2table_base(sent) (sect_to_phys(*(sent) & 0xFFFFFFC0))
188 static sysmmu_pte_t *page_entry(sysmmu_pte_t *sent, sysmmu_iova_t iova)
191 lv2table_base(sent)) + lv2ent_offset(iova);
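
section_phys() (line 98) masks off the attribute bits of a section entry to keep its aligned physical base, while lv2table_base() (line 122) recovers the physical base of the second-level table from a page-table link; page_entry() (lines 188-191) then converts that base back to a kernel virtual address and indexes it with lv2ent_offset(iova). The sketch below shows only the mask/index arithmetic; the 1 MiB section / 4 KiB small-page geometry is assumed, and the kernel-only phys_to_virt() is left out.

#include <stdint.h>
#include <stdio.h>

typedef uint32_t sysmmu_iova_t;

#define SECT_ORDER  20                       /* assumed: 1 MiB sections    */
#define SPAGE_ORDER 12                       /* assumed: 4 KiB small pages */
#define SECT_SIZE   (1u << SECT_ORDER)
#define SECT_MASK   (~(SECT_SIZE - 1))

/* index of the second-level entry covering iova inside its section */
#define lv2ent_offset(iova)  (((iova) & ~SECT_MASK) >> SPAGE_ORDER)

int main(void)
{
	sysmmu_iova_t iova = 0x2012a000u;

	printf("section base  = %#010x\n", iova & SECT_MASK);  /* 0x20100000 */
	printf("lv2ent_offset = %u\n", lv2ent_offset(iova));   /* 42 (0x2a)  */
	return 0;
}

With that arithmetic in place, the driver's page_entry() at line 191 is simply the kernel-virtual base of the lv2 table plus that index.
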
923 sysmmu_pte_t *sent, sysmmu_iova_t iova, short *pgcounter)
925 if (lv1ent_section(sent)) {
930 if (lv1ent_fault(sent)) {
933 bool need_flush_flpd_cache = lv1ent_zero(sent);
940 exynos_iommu_set_pte(sent, mk_lv1ent_page(virt_to_phys(pent)));
977 return page_entry(sent, iova);
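
The fragments from alloc_lv2entry() (lines 923-977) show the slow path of the map operation: a slot already holding a section cannot be split, a fault slot gets a freshly zeroed second-level table linked in with mk_lv1ent_page(), and the function finally returns the lv2 PTE for the iova via page_entry(). The sketch below models that flow in user space, treating "physical" addresses as offsets into a small pool; error codes, locking, the pgcounter bookkeeping and the ZERO_LV2LINK/FLPD-cache handling of the real driver are reduced to comments.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

typedef uint32_t sysmmu_pte_t;
typedef uint32_t sysmmu_iova_t;

#define NUM_LV2ENTRIES 256                        /* assumed lv2 table size */
#define LV2TABLE_SIZE  (NUM_LV2ENTRIES * sizeof(sysmmu_pte_t))
#define SECT_MASK      (~((1u << 20) - 1))        /* assumed 1 MiB sections */
#define lv2ent_offset(iova)  (((iova) & ~SECT_MASK) >> 12)

/* simulated "physical" memory: a physical address is an offset into pool[] */
static uint32_t pool[16 * 1024];                  /* 64 KiB, 4-byte aligned */
static uint32_t pool_next = 1024;                 /* bump allocator cursor  */

static void *phys_to_virt(uint32_t pa) { return (uint8_t *)pool + pa; }
static uint32_t virt_to_phys(void *va) { return (uint32_t)((uint8_t *)va - (uint8_t *)pool); }

#define lv1ent_section(sent)   ((*(sent) & 3) == 2)
#define lv1ent_fault(sent)     (((*(sent) & 3) == 0) || ((*(sent) & 3) == 3))
#define lv2table_base(sent)    (*(sent) & 0xFFFFFFC0)
#define mk_lv1ent_page(pa)     ((pa) | 1)         /* type bits 0b01         */
#define page_entry(sent, iova) \
	((sysmmu_pte_t *)phys_to_virt(lv2table_base(sent)) + lv2ent_offset(iova))

static sysmmu_pte_t *alloc_lv2entry_sketch(sysmmu_pte_t *sent, sysmmu_iova_t iova)
{
	if (lv1ent_section(sent))      /* slot already maps a whole section  */
		return NULL;           /* driver: ERR_PTR(-EADDRINUSE)       */

	if (lv1ent_fault(sent)) {
		/* allocate and zero a new second-level table */
		sysmmu_pte_t *pent = phys_to_virt(pool_next);

		pool_next += LV2TABLE_SIZE;
		memset(pent, 0, LV2TABLE_SIZE);

		/* link it into the first-level slot; the real driver also
		 * flushes its FLPD cache here when the old entry was the
		 * ZERO_LV2LINK (see need_flush_flpd_cache, line 933) */
		*sent = mk_lv1ent_page(virt_to_phys(pent));
	}

	return page_entry(sent, iova); /* lv2 PTE covering iova */
}

int main(void)
{
	sysmmu_pte_t slot = 0;         /* first-level slot, initially fault  */
	sysmmu_pte_t *pte = alloc_lv2entry_sketch(&slot, 0x2012a000u);

	printf("slot = %#x, lv2 index = %u, pte zeroed = %d\n",
	       slot, lv2ent_offset(0x2012a000u), *pte == 0);
	return 0;
}
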
981 sysmmu_pte_t *sent, sysmmu_iova_t iova,
984 if (lv1ent_section(sent)) {
990 if (lv1ent_page(sent)) {
997 kmem_cache_free(lv2table_kmem_cache, page_entry(sent, 0));
1001 exynos_iommu_set_pte(sent, mk_lv1ent_sect(paddr, prot));
1004 if (lv1ent_page_zero(sent)) {
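
lv1set_section() (lines 981-1004) is the 1 MiB counterpart: it refuses a slot that is already a section, frees a linked second-level table when one is present (the driver additionally requires that the table has no live entries), installs the section entry with mk_lv1ent_sect(), and then, under the domain lock, uses lv1ent_page_zero() at line 1004 to decide whether the FLPD caches of the attached SysMMUs need invalidating. A compact, hedged skeleton of that flow follows; the section-entry encoding is assumed and the kernel calls are kept as comments.

#include <errno.h>
#include <stdint.h>

typedef uint32_t sysmmu_pte_t;

#define lv1ent_section(sent)  ((*(sent) & 3) == 2)
#define lv1ent_page(sent)     ((*(sent) & 3) == 1)  /* simplified: ignores ZERO_LV2LINK */
/* assumed encoding: 1 MiB-aligned base plus type bits 0b10; the real
 * mk_lv1ent_sect() also folds in protection bits from prot */
#define mk_lv1ent_sect(pa)    (((pa) & ~((1u << 20) - 1)) | 2)

static int lv1set_section_sketch(sysmmu_pte_t *sent, uint32_t paddr,
				 int lv2_table_in_use)
{
	if (lv1ent_section(sent))       /* already mapped as a section */
		return -EADDRINUSE;

	if (lv1ent_page(sent)) {
		/* the driver refuses if the lv2 table still has live
		 * entries (tracked via pgcnt), then frees the table:
		 *   kmem_cache_free(lv2table_kmem_cache, page_entry(sent, 0)); */
		if (lv2_table_in_use)
			return -EADDRINUSE;
	}

	*sent = mk_lv1ent_sect(paddr);  /* install the section entry */
	/* driver: under the domain lock, lv1ent_page_zero() (line 1004)
	 * guards the FLPD-cache invalidation for the attached SysMMUs */
	return 0;
}

int main(void)
{
	sysmmu_pte_t slot = 0;          /* fault entry */

	return lv1set_section_sketch(&slot, 0x40100000u, 0) ? 1 : 0;
}
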
1168 /* lv1ent_page(sent) == true here */