Lines matching refs: uiomr (struct usnic_uiom_reg *)
86 int dmasync, struct usnic_uiom_reg *uiomr)
88 struct list_head *chunk_list = &uiomr->chunk_list;
125 uiomr->owning_mm = mm = current->mm;
188 mmgrab(uiomr->owning_mm);
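Lines 125 and 188 above show the registration path recording current->mm in uiomr->owning_mm and pinning it with mmgrab(); the matching mmdrop() appears in the release tail at line 425. A minimal kernel-style sketch of that grab/drop pairing, using a hypothetical demo_reg handle rather than the driver's own struct usnic_uiom_reg:

#include <linux/sched.h>        /* current */
#include <linux/sched/mm.h>     /* mmgrab(), mmdrop() */
#include <linux/mm_types.h>     /* struct mm_struct */

/* Hypothetical stand-in for struct usnic_uiom_reg. */
struct demo_reg {
        struct mm_struct *owning_mm;
};

/* Registration side: remember which mm owns the pinned pages and take a
 * reference so the mm_struct cannot go away while the region is mapped. */
static inline void demo_reg_grab_mm(struct demo_reg *reg)
{
        reg->owning_mm = current->mm;
        mmgrab(reg->owning_mm);
}

/* Release side: drop the reference taken above (pairs with mmgrab()). */
static inline void demo_reg_drop_mm(struct demo_reg *reg)
{
        mmdrop(reg->owning_mm);
}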
215 struct usnic_uiom_reg *uiomr,
224 npages = PAGE_ALIGN(uiomr->length + uiomr->offset) >> PAGE_SHIFT;
225 vpn_start = (uiomr->va & PAGE_MASK) >> PAGE_SHIFT;
240 usnic_uiom_put_pages(&uiomr->chunk_list, dirty & writable);
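Lines 224-225 turn the registered byte range back into pages: the page count is PAGE_ALIGN(length + offset) >> PAGE_SHIFT (the same expression usnic_uiom_num_pages() uses at line 431), and the starting virtual page number is (va & PAGE_MASK) >> PAGE_SHIFT. A small standalone C program showing that arithmetic, assuming 4 KiB pages; the PAGE_* macros are redefined here for userspace and the example values are made up:

#include <stdio.h>

#define PAGE_SHIFT      12
#define PAGE_SIZE       (1UL << PAGE_SHIFT)
#define PAGE_MASK       (~(PAGE_SIZE - 1))
#define PAGE_ALIGN(x)   (((x) + PAGE_SIZE - 1) & PAGE_MASK)

int main(void)
{
        unsigned long va     = 0x7f1234561678UL;    /* hypothetical user VA */
        unsigned long offset = va & ~PAGE_MASK;     /* offset into first page */
        unsigned long length = 10000;               /* bytes registered */

        unsigned long npages    = PAGE_ALIGN(length + offset) >> PAGE_SHIFT;
        unsigned long vpn_start = (va & PAGE_MASK) >> PAGE_SHIFT;

        /* 10000 bytes starting 0x678 into a page span 3 pages. */
        printf("npages = %lu, vpn_start = 0x%lx\n", npages, vpn_start);
        return 0;
}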
245 struct usnic_uiom_reg *uiomr)
255 struct usnic_uiom_pd *pd = uiomr->pd;
256 long int va = uiomr->va & PAGE_MASK;
259 flags |= (uiomr->writable) ? IOMMU_WRITE : 0;
260 chunk = list_first_entry(&uiomr->chunk_list, struct usnic_uiom_chunk,
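Line 259 derives the IOMMU protection flags for the mapping from the registration's writable flag: write permission is added only when the region was registered writable. A tiny hedged sketch of that flag construction; IOMMU_READ and IOMMU_WRITE are the kernel's iommu prot bits, but the driver's baseline flags are not visible in the matches above, so starting from IOMMU_READ alone is an assumption:

#include <linux/iommu.h>        /* IOMMU_READ, IOMMU_WRITE */

/* Hypothetical helper mirroring the pattern at line 259. */
static inline int demo_iommu_prot(int writable)
{
        int flags = IOMMU_READ;         /* assumed baseline, see note above */

        flags |= writable ? IOMMU_WRITE : 0;
        return flags;
}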
338 struct usnic_uiom_reg *uiomr;
359 uiomr = kmalloc(sizeof(*uiomr), GFP_KERNEL);
360 if (!uiomr)
363 uiomr->va = va_base;
364 uiomr->offset = offset;
365 uiomr->length = size;
366 uiomr->writable = writable;
367 uiomr->pd = pd;
370 uiomr);
389 err = usnic_uiom_map_sorted_intervals(&sorted_diff_intervals, uiomr);
408 return uiomr;
415 usnic_uiom_put_pages(&uiomr->chunk_list, 0);
417 mmdrop(uiomr->owning_mm);
419 kfree(uiomr);
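Lines 359-419 give the overall shape of the registration call: allocate the handle, fill in va/offset/length/writable/pd, pin and map the pages, and on failure unwind in reverse (unpin, drop the mm reference, free the handle). A condensed kernel-style sketch of that allocate/initialize/unwind pattern; the names, signature, and error labels are illustrative, not the driver's:

#include <linux/slab.h>         /* kmalloc(), kfree() */
#include <linux/err.h>          /* ERR_PTR() */
#include <linux/errno.h>        /* ENOMEM */

/* Hypothetical stand-in for struct usnic_uiom_reg. */
struct demo_reg {
        unsigned long   va;
        size_t          offset;
        size_t          length;
        int             writable;
};

/* Hypothetical stub standing in for the pin-pages + IOMMU-map step. */
static int demo_pin_and_map(struct demo_reg *reg)
{
        return 0;
}

static struct demo_reg *demo_reg_get(unsigned long va_base, size_t offset,
                                     size_t size, int writable)
{
        struct demo_reg *reg;
        int err;

        reg = kmalloc(sizeof(*reg), GFP_KERNEL);
        if (!reg)
                return ERR_PTR(-ENOMEM);

        reg->va = va_base;
        reg->offset = offset;
        reg->length = size;
        reg->writable = writable;

        err = demo_pin_and_map(reg);
        if (err)
                goto out_free;

        return reg;

out_free:
        kfree(reg);
        return ERR_PTR(err);
}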
423 static void __usnic_uiom_release_tail(struct usnic_uiom_reg *uiomr)
425 mmdrop(uiomr->owning_mm);
426 kfree(uiomr);
429 static inline size_t usnic_uiom_num_pages(struct usnic_uiom_reg *uiomr)
431 return PAGE_ALIGN(uiomr->length + uiomr->offset) >> PAGE_SHIFT;
434 void usnic_uiom_reg_release(struct usnic_uiom_reg *uiomr)
436 __usnic_uiom_reg_release(uiomr->pd, uiomr, 1);
438 atomic64_sub(usnic_uiom_num_pages(uiomr), &uiomr->owning_mm->pinned_vm);
439 __usnic_uiom_release_tail(uiomr);
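Lines 434-439 show the teardown order: unmap and unpin first (__usnic_uiom_reg_release), credit the pages back to the owning mm's pinned_vm counter, then let the release tail at lines 423-426 drop the mm reference and free the handle. A standalone kernel-style sketch of that accounting step, assuming pinned_vm is the atomic64_t counter in struct mm_struct (as in current kernels); the demo_reg type is hypothetical and redeclared here so the sketch stands alone:

#include <linux/mm.h>           /* PAGE_ALIGN, PAGE_SHIFT */
#include <linux/mm_types.h>     /* struct mm_struct */
#include <linux/sched/mm.h>     /* mmdrop() */
#include <linux/slab.h>         /* kfree() */
#include <linux/atomic.h>       /* atomic64_sub() */

struct demo_reg {
        struct mm_struct *owning_mm;
        unsigned long     va;
        size_t            offset;
        size_t            length;
};

/* Same arithmetic as usnic_uiom_num_pages() at line 431. */
static inline size_t demo_num_pages(struct demo_reg *reg)
{
        return PAGE_ALIGN(reg->length + reg->offset) >> PAGE_SHIFT;
}

/* Hypothetical release: after unmapping/unpinning (not shown), return the
 * pages to the pinned_vm accounting, then drop the mm and free the handle. */
static void demo_reg_release(struct demo_reg *reg)
{
        atomic64_sub(demo_num_pages(reg), &reg->owning_mm->pinned_vm);
        mmdrop(reg->owning_mm);
        kfree(reg);
}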