Lines matching refs: buffer
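These hits appear to come from the DMA-BUF heap helper code: a struct heap_helper_buffer plus the dma_buf callbacks built around it. After each group of matches below, a short reconstruction of the surrounding function is sketched in. Anything not visible in the matched lines themselves (function and struct names, error paths, the per-attachment type) is an assumption based on common dma-buf patterns, not a quote from the source.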
15 void init_heap_helper_buffer(struct heap_helper_buffer *buffer,
18 buffer->priv_virt = NULL;
19 mutex_init(&buffer->lock);
20 buffer->vmap_cnt = 0;
21 buffer->vaddr = NULL;
22 buffer->pagecount = 0;
23 buffer->pages = NULL;
24 INIT_LIST_HEAD(&buffer->attachments);
25 buffer->free = free;
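Taken together, lines 15-25 look like a complete initializer. A minimal sketch of the whole function, assuming the second parameter is the heap's free callback:

void init_heap_helper_buffer(struct heap_helper_buffer *buffer,
                             void (*free)(struct heap_helper_buffer *buffer))
{
        /* Zero every field so the heap can fill in pages/pagecount
         * before exporting the buffer.
         */
        buffer->priv_virt = NULL;
        mutex_init(&buffer->lock);
        buffer->vmap_cnt = 0;
        buffer->vaddr = NULL;
        buffer->pagecount = 0;
        buffer->pages = NULL;
        INIT_LIST_HEAD(&buffer->attachments);
        buffer->free = free;
}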
28 struct dma_buf *heap_helper_export_dmabuf(struct heap_helper_buffer *buffer,
33 exp_info.exp_name = dma_heap_get_name(buffer->heap);
35 exp_info.size = buffer->size;
37 exp_info.priv = buffer;
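Lines 28-37 fill a dma_buf_export_info; the gaps at lines 34 and 36 presumably set the ops table and the fd flags. A plausible completion using the standard DEFINE_DMA_BUF_EXPORT_INFO()/dma_buf_export() pattern (the heap_helper_ops name and the fd_flags parameter are assumptions):

struct dma_buf *heap_helper_export_dmabuf(struct heap_helper_buffer *buffer,
                                          int fd_flags)
{
        DEFINE_DMA_BUF_EXPORT_INFO(exp_info);

        exp_info.exp_name = dma_heap_get_name(buffer->heap);
        exp_info.ops = &heap_helper_ops;   /* assumed dma_buf_ops table */
        exp_info.size = buffer->size;
        exp_info.flags = fd_flags;
        exp_info.priv = buffer;

        return dma_buf_export(&exp_info);
}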
42 static void *dma_heap_map_kernel(struct heap_helper_buffer *buffer)
46 vaddr = vmap(buffer->pages, buffer->pagecount, VM_MAP, PAGE_KERNEL);
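Line 46 maps the buffer's page array into one contiguous kernel virtual range. vmap() returns NULL on failure, so the function presumably converts that into an error pointer for its callers; a sketch under that assumption:

static void *dma_heap_map_kernel(struct heap_helper_buffer *buffer)
{
        void *vaddr;

        vaddr = vmap(buffer->pages, buffer->pagecount, VM_MAP, PAGE_KERNEL);
        if (!vaddr)
                return ERR_PTR(-ENOMEM);

        return vaddr;
}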
53 static void dma_heap_buffer_destroy(struct heap_helper_buffer *buffer)
55 if (buffer->vmap_cnt > 0) {
56 WARN(1, "%s: buffer still mapped in the kernel\n", __func__);
57 vunmap(buffer->vaddr);
60 buffer->free(buffer);
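Lines 53-60 show the teardown: if a kernel mapping is still live, warn about the leaked vmap reference and unmap it anyway, then hand the buffer back to the heap's free callback. The whole function is likely just:

static void dma_heap_buffer_destroy(struct heap_helper_buffer *buffer)
{
        if (buffer->vmap_cnt > 0) {
                /* someone forgot a vunmap; clean up rather than leak */
                WARN(1, "%s: buffer still mapped in the kernel\n", __func__);
                vunmap(buffer->vaddr);
        }

        buffer->free(buffer);
}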
63 static void *dma_heap_buffer_vmap_get(struct heap_helper_buffer *buffer)
67 if (buffer->vmap_cnt) {
68 buffer->vmap_cnt++;
69 return buffer->vaddr;
71 vaddr = dma_heap_map_kernel(buffer);
74 buffer->vaddr = vaddr;
75 buffer->vmap_cnt++;
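Lines 63-75 implement a refcounted kernel mapping: reuse the existing vmap if one exists, otherwise create it. The IS_ERR() check between lines 71 and 74 is an assumption, but something must keep a failed mapping out of buffer->vaddr:

static void *dma_heap_buffer_vmap_get(struct heap_helper_buffer *buffer)
{
        void *vaddr;

        if (buffer->vmap_cnt) {
                buffer->vmap_cnt++;
                return buffer->vaddr;
        }
        vaddr = dma_heap_map_kernel(buffer);
        if (IS_ERR(vaddr))
                return vaddr;
        buffer->vaddr = vaddr;
        buffer->vmap_cnt++;
        return vaddr;
}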
79 static void dma_heap_buffer_vmap_put(struct heap_helper_buffer *buffer)
81 if (!--buffer->vmap_cnt) {
82 vunmap(buffer->vaddr);
83 buffer->vaddr = NULL;
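The matching put side at lines 79-83 drops the count and tears the mapping down on the last reference. Both helpers appear to run under buffer->lock (see lines 244-246 and 255-257 below), which is why a plain integer count suffices:

static void dma_heap_buffer_vmap_put(struct heap_helper_buffer *buffer)
{
        if (!--buffer->vmap_cnt) {
                vunmap(buffer->vaddr);
                buffer->vaddr = NULL;
        }
}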
97 struct heap_helper_buffer *buffer = dmabuf->priv;
104 ret = sg_alloc_table_from_pages(&a->table, buffer->pages,
105 buffer->pagecount, 0,
106 buffer->pagecount << PAGE_SHIFT,
118 mutex_lock(&buffer->lock);
119 list_add(&a->list, &buffer->attachments);
120 mutex_unlock(&buffer->lock);
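Lines 97-120 are an attach callback: build a per-attachment sg_table over the buffer's pages and track it on buffer->attachments so the CPU-access hooks can sync every attached device. A sketch, with the dma_heaps_attachment type, the allocation, and the error handling all assumed:

struct dma_heaps_attachment {
        struct device *dev;
        struct sg_table table;
        struct list_head list;
};

static int dma_heap_attach(struct dma_buf *dmabuf,
                           struct dma_buf_attachment *attachment)
{
        struct dma_heaps_attachment *a;
        struct heap_helper_buffer *buffer = dmabuf->priv;
        int ret;

        a = kzalloc(sizeof(*a), GFP_KERNEL);
        if (!a)
                return -ENOMEM;

        /* one sg_table per attachment, covering the whole buffer */
        ret = sg_alloc_table_from_pages(&a->table, buffer->pages,
                                        buffer->pagecount, 0,
                                        buffer->pagecount << PAGE_SHIFT,
                                        GFP_KERNEL);
        if (ret) {
                kfree(a);
                return ret;
        }

        a->dev = attachment->dev;
        INIT_LIST_HEAD(&a->list);
        attachment->priv = a;

        mutex_lock(&buffer->lock);
        list_add(&a->list, &buffer->attachments);
        mutex_unlock(&buffer->lock);

        return 0;
}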
129 struct heap_helper_buffer *buffer = dmabuf->priv;
131 mutex_lock(&buffer->lock);
133 mutex_unlock(&buffer->lock);
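The detach path at lines 129-133 only shows the locked region; unlinking the attachment there and then freeing its table is the natural counterpart to the attach above. Sketch:

static void dma_heap_detach(struct dma_buf *dmabuf,
                            struct dma_buf_attachment *attachment)
{
        struct dma_heaps_attachment *a = attachment->priv;
        struct heap_helper_buffer *buffer = dmabuf->priv;

        mutex_lock(&buffer->lock);
        list_del(&a->list);
        mutex_unlock(&buffer->lock);

        sg_free_table(&a->table);
        kfree(a);
}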
163 struct heap_helper_buffer *buffer = vma->vm_private_data;
165 if (vmf->pgoff >= buffer->pagecount)
168 vmf->page = buffer->pages[vmf->pgoff];
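Lines 163-168 are a .fault handler backing the mmap with the page array. Note that the bounds check needs >=, not >: valid indices into buffer->pages run 0..pagecount-1, so a pgoff equal to pagecount would index one past the end of the array. A sketch of the full handler, with the get_page() reference on the returned page assumed:

static vm_fault_t dma_heap_vm_fault(struct vm_fault *vmf)
{
        struct vm_area_struct *vma = vmf->vma;
        struct heap_helper_buffer *buffer = vma->vm_private_data;

        if (vmf->pgoff >= buffer->pagecount)
                return VM_FAULT_SIGBUS;

        vmf->page = buffer->pages[vmf->pgoff];
        get_page(vmf->page);

        return 0;
}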
180 struct heap_helper_buffer *buffer = dmabuf->priv;
186 vma->vm_private_data = buffer;
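Lines 180-186 stash the buffer on the VMA so the fault handler above can find it. A typical dma-buf mmap callback also rejects private mappings and installs the vm_ops; the dma_heap_vm_ops name and the VM_SHARED check here are assumptions:

static const struct vm_operations_struct dma_heap_vm_ops = {
        .fault = dma_heap_vm_fault,
};

static int dma_heap_mmap(struct dma_buf *dmabuf, struct vm_area_struct *vma)
{
        struct heap_helper_buffer *buffer = dmabuf->priv;

        if ((vma->vm_flags & (VM_SHARED | VM_MAYSHARE)) == 0)
                return -EINVAL;

        vma->vm_ops = &dma_heap_vm_ops;
        vma->vm_private_data = buffer;

        return 0;
}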
193 struct heap_helper_buffer *buffer = dmabuf->priv;
195 dma_heap_buffer_destroy(buffer);
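Lines 193-195 tie the dma-buf release callback to the destroy helper; the whole callback is likely just:

static void dma_heap_dma_buf_release(struct dma_buf *dmabuf)
{
        struct heap_helper_buffer *buffer = dmabuf->priv;

        dma_heap_buffer_destroy(buffer);
}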
201 struct heap_helper_buffer *buffer = dmabuf->priv;
205 mutex_lock(&buffer->lock);
207 if (buffer->vmap_cnt)
208 invalidate_kernel_vmap_range(buffer->vaddr, buffer->size);
210 list_for_each_entry(a, &buffer->attachments, list) {
214 mutex_unlock(&buffer->lock);
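Lines 201-214 run before the CPU touches the buffer: under the lock, invalidate any live kernel vmap alias, then sync each attached device's sg_table toward the CPU. The loop body is not matched (it names the attachment, not buffer), so the dma_sync_sg_for_cpu() call is an assumption:

static int dma_heap_dma_buf_begin_cpu_access(struct dma_buf *dmabuf,
                                             enum dma_data_direction direction)
{
        struct heap_helper_buffer *buffer = dmabuf->priv;
        struct dma_heaps_attachment *a;

        mutex_lock(&buffer->lock);

        if (buffer->vmap_cnt)
                invalidate_kernel_vmap_range(buffer->vaddr, buffer->size);

        list_for_each_entry(a, &buffer->attachments, list) {
                dma_sync_sg_for_cpu(a->dev, a->table.sgl, a->table.nents,
                                    direction);
        }
        mutex_unlock(&buffer->lock);

        return 0;
}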
222 struct heap_helper_buffer *buffer = dmabuf->priv;
225 mutex_lock(&buffer->lock);
227 if (buffer->vmap_cnt)
228 flush_kernel_vmap_range(buffer->vaddr, buffer->size);
230 list_for_each_entry(a, &buffer->attachments, list) {
234 mutex_unlock(&buffer->lock);
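Lines 222-234 are the mirror image for when the CPU is done: flush the vmap alias and sync each attachment back toward the device, presumably via dma_sync_sg_for_device() in the loop:

static int dma_heap_dma_buf_end_cpu_access(struct dma_buf *dmabuf,
                                           enum dma_data_direction direction)
{
        struct heap_helper_buffer *buffer = dmabuf->priv;
        struct dma_heaps_attachment *a;

        mutex_lock(&buffer->lock);

        if (buffer->vmap_cnt)
                flush_kernel_vmap_range(buffer->vaddr, buffer->size);

        list_for_each_entry(a, &buffer->attachments, list) {
                dma_sync_sg_for_device(a->dev, a->table.sgl, a->table.nents,
                                       direction);
        }
        mutex_unlock(&buffer->lock);

        return 0;
}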
241 struct heap_helper_buffer *buffer = dmabuf->priv;
244 mutex_lock(&buffer->lock);
245 vaddr = dma_heap_buffer_vmap_get(buffer);
246 mutex_unlock(&buffer->lock);
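Lines 241-246 wrap the refcounted helper in the dma-buf .vmap callback, taking the lock that the helper relies on:

static void *dma_heap_dma_buf_vmap(struct dma_buf *dmabuf)
{
        struct heap_helper_buffer *buffer = dmabuf->priv;
        void *vaddr;

        mutex_lock(&buffer->lock);
        vaddr = dma_heap_buffer_vmap_get(buffer);
        mutex_unlock(&buffer->lock);

        return vaddr;
}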
253 struct heap_helper_buffer *buffer = dmabuf->priv;
255 mutex_lock(&buffer->lock);
256 dma_heap_buffer_vmap_put(buffer);
257 mutex_unlock(&buffer->lock);
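And lines 253-257 are the matching .vunmap; the vaddr argument can go unused because the buffer tracks its own mapping in buffer->vaddr:

static void dma_heap_dma_buf_vunmap(struct dma_buf *dmabuf, void *vaddr)
{
        struct heap_helper_buffer *buffer = dmabuf->priv;

        mutex_lock(&buffer->lock);
        dma_heap_buffer_vmap_put(buffer);
        mutex_unlock(&buffer->lock);
}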