Lines Matching defs:buf

40 struct vb2_vmalloc_buf *buf;
42 buf = kzalloc(sizeof(*buf), GFP_KERNEL | vb->vb2_queue->gfp_flags);
43 if (!buf)
46 buf->size = size;
47 buf->vaddr = vmalloc_user(buf->size);
48 if (!buf->vaddr) {
49 pr_debug("vmalloc of size %ld failed\n", buf->size);
50 kfree(buf);
54 buf->dma_dir = vb->vb2_queue->dma_dir;
55 buf->handler.refcount = &buf->refcount;
56 buf->handler.put = vb2_vmalloc_put;
57 buf->handler.arg = buf;
59 refcount_set(&buf->refcount, 1);
60 return buf;
65 struct vb2_vmalloc_buf *buf = buf_priv;
67 if (refcount_dec_and_test(&buf->refcount)) {
68 vfree(buf->vaddr);
69 kfree(buf);
76 struct vb2_vmalloc_buf *buf;
81 buf = kzalloc(sizeof(*buf), GFP_KERNEL);
82 if (!buf)
85 buf->dma_dir = vb->vb2_queue->dma_dir;
87 buf->size = size;
89 buf->dma_dir == DMA_FROM_DEVICE ||
90 buf->dma_dir == DMA_BIDIRECTIONAL);
95 buf->vec = vec;
107 buf->vaddr = (__force void *)
110 buf->vaddr = vm_map_ram(frame_vector_pages(vec), n_pages, -1);
113 if (!buf->vaddr)
115 buf->vaddr += offset;
116 return buf;
121 kfree(buf);
128 struct vb2_vmalloc_buf *buf = buf_priv;
129 unsigned long vaddr = (unsigned long)buf->vaddr & PAGE_MASK;
134 if (!buf->vec->is_pfns) {
135 n_pages = frame_vector_count(buf->vec);
136 pages = frame_vector_pages(buf->vec);
139 if (buf->dma_dir == DMA_FROM_DEVICE ||
140 buf->dma_dir == DMA_BIDIRECTIONAL)
144 iounmap((__force void __iomem *)buf->vaddr);
146 vb2_destroy_framevec(buf->vec);
147 kfree(buf);
152 struct vb2_vmalloc_buf *buf = buf_priv;
154 if (!buf->vaddr) {
159 return buf->vaddr;
164 struct vb2_vmalloc_buf *buf = buf_priv;
165 return refcount_read(&buf->refcount);
170 struct vb2_vmalloc_buf *buf = buf_priv;
173 if (!buf) {
178 ret = remap_vmalloc_range(vma, buf->vaddr, 0);
192 vma->vm_private_data = &buf->handler;
214 struct vb2_vmalloc_buf *buf = dbuf->priv;
215 int num_pages = PAGE_ALIGN(buf->size) / PAGE_SIZE;
218 void *vaddr = buf->vaddr;
311 struct vb2_vmalloc_buf *buf = dbuf->priv;
313 iosys_map_set_vaddr(map, buf->vaddr);
338 struct vb2_vmalloc_buf *buf = buf_priv;
343 exp_info.size = buf->size;
345 exp_info.priv = buf;
347 if (WARN_ON(!buf->vaddr))
355 refcount_inc(&buf->refcount);
368 struct vb2_vmalloc_buf *buf = mem_priv;
372 ret = dma_buf_vmap_unlocked(buf->dbuf, &map);
375 buf->vaddr = map.vaddr;
382 struct vb2_vmalloc_buf *buf = mem_priv;
383 struct iosys_map map = IOSYS_MAP_INIT_VADDR(buf->vaddr);
385 dma_buf_vunmap_unlocked(buf->dbuf, &map);
386 buf->vaddr = NULL;
391 struct vb2_vmalloc_buf *buf = mem_priv;
392 struct iosys_map map = IOSYS_MAP_INIT_VADDR(buf->vaddr);
394 if (buf->vaddr)
395 dma_buf_vunmap_unlocked(buf->dbuf, &map);
397 kfree(buf);
405 struct vb2_vmalloc_buf *buf;
410 buf = kzalloc(sizeof(*buf), GFP_KERNEL);
411 if (!buf)
414 buf->dbuf = dbuf;
415 buf->dma_dir = vb->vb2_queue->dma_dir;
416 buf->size = size;
418 return buf;
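
The buf-> accesses throughout this listing all touch the same per-buffer bookkeeping object. For orientation, here is a rough sketch of that structure, reconstructed only from the fields referenced above (vaddr, vec, dma_dir, size, refcount, handler, dbuf); the exact types, field order and any additional members in videobuf2-vmalloc.c may differ:

/*
 * Sketch of the per-buffer state implied by the matched lines above.
 * Field names come from the buf-> references in the listing; types and
 * ordering are assumptions, not a verbatim copy of the source file.
 */
struct vb2_vmalloc_buf {
	void				*vaddr;		/* kernel mapping: vmalloc_user(), vm_map_ram(), ioremap() or dma_buf vmap */
	struct frame_vector		*vec;		/* pinned user pages for USERPTR buffers */
	enum dma_data_direction		dma_dir;	/* copied from vb->vb2_queue->dma_dir */
	unsigned long			size;		/* requested buffer size */
	refcount_t			refcount;	/* users of the allocation */
	struct vb2_vmarea_handler	handler;	/* refcount/put glue handed to mmap'ed VMAs */
	struct dma_buf			*dbuf;		/* backing dma-buf for attached buffers */
};

The handler member appears to be what keeps an mmap'ed allocation alive: handler.refcount and handler.put are wired to the buffer's refcount and vb2_vmalloc_put (lines 55-56), and vma->vm_private_data is pointed at the handler (line 192), presumably so the VMA open/close helpers can take and drop references on the mapping.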