Lines matching defs:context — definitions and uses of the struct etnaviv_iommu_context parameter throughout the etnaviv GPU MMU code (drivers/gpu/drm/etnaviv/etnaviv_mmu.c in the Linux kernel). The number at the start of each entry is its line number in that file.
16 static void etnaviv_context_unmap(struct etnaviv_iommu_context *context,
29 unmapped_page = context->global->ops->unmap(context, iova,
39 static int etnaviv_context_map(struct etnaviv_iommu_context *context,
55 ret = context->global->ops->map(context, iova, paddr, pgsize,
67 etnaviv_context_unmap(context, orig_iova, orig_size - size);
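etnaviv_context_unmap() and etnaviv_context_map() walk a region in fixed-size pages through the per-version page-table ops, and etnaviv_context_map() unwinds the already-mapped prefix (orig_iova, orig_size - size) when a page fails mid-way. Below is a minimal user-space C sketch of that chunked map-with-rollback pattern; fake_map() and fake_unmap() are invented stand-ins for context->global->ops->map/unmap, not the kernel API.

    /* Minimal sketch of the chunked map-with-rollback pattern in
     * etnaviv_context_map(). fake_map()/fake_unmap() are stand-ins for
     * context->global->ops->map/unmap, not the kernel API. */
    #include <stdio.h>
    #include <stddef.h>
    #include <errno.h>

    #define PGSIZE 4096UL

    static int fake_map(unsigned long iova, unsigned long paddr, size_t len)
    {
        /* Pretend the page tables run out after eight pages. */
        return (iova >= 8 * PGSIZE) ? -ENOMEM : 0;
    }

    static void fake_unmap(unsigned long iova, size_t len)
    {
        printf("rollback: unmap iova 0x%lx, %zu bytes\n", iova, len);
    }

    static int context_map(unsigned long iova, unsigned long paddr, size_t size)
    {
        unsigned long orig_iova = iova;
        size_t orig_size = size;
        int ret = 0;

        while (size) {
            ret = fake_map(iova, paddr, PGSIZE);
            if (ret)
                break;
            iova += PGSIZE;
            paddr += PGSIZE;
            size -= PGSIZE;
        }

        /* Unmap exactly the prefix that was mapped before the failure. */
        if (ret)
            fake_unmap(orig_iova, orig_size - size);

        return ret;
    }

    int main(void)
    {
        /* Ten pages: mapping fails at page 8, the first 8 are rolled back. */
        printf("context_map returned %d\n", context_map(0, 0, 10 * PGSIZE));
        return 0;
    }

orig_size - size is exactly the number of bytes mapped before the failure, so an error leaves the page tables unchanged.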
72 static int etnaviv_iommu_map(struct etnaviv_iommu_context *context, u32 iova,
79 if (!context || !sgt)
88 ret = etnaviv_context_map(context, da, pa, bytes, prot);
98 etnaviv_context_unmap(context, iova, da - iova);
102 static void etnaviv_iommu_unmap(struct etnaviv_iommu_context *context, u32 iova,
112 etnaviv_context_unmap(context, da, bytes);
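etnaviv_iommu_map() advances a device address da across the entries of a scatter-gather table, and on a failed entry it unmaps the da - iova bytes it had already established; etnaviv_iommu_unmap() is the same walk calling etnaviv_context_unmap() per entry. A hedged sketch of the mapping side, with a plain {paddr, len} array standing in for struct sg_table and page-aligned chunk lengths assumed:

    /* Sketch of etnaviv_iommu_map()'s walk: map each physically
     * contiguous chunk at a growing device address da, and unwind the
     * da - iova bytes already mapped if a chunk fails. A plain array
     * stands in for struct sg_table. */
    #include <stdio.h>
    #include <stddef.h>
    #include <errno.h>

    struct chunk { unsigned long paddr; size_t len; };

    static int map_chunk(unsigned long da, unsigned long pa, size_t len)
    {
        return pa ? 0 : -EINVAL;    /* a zero paddr simulates a bad entry */
    }

    static void unmap_range(unsigned long da, size_t len)
    {
        printf("unwind: unmap [0x%lx, 0x%lx)\n", da, da + len);
    }

    static int iommu_map(unsigned long iova, const struct chunk *sg, int nents)
    {
        unsigned long da = iova;
        int i, ret;

        for (i = 0; i < nents; i++) {
            ret = map_chunk(da, sg[i].paddr, sg[i].len);
            if (ret) {
                unmap_range(iova, da - iova);    /* only the mapped prefix */
                return ret;
            }
            da += sg[i].len;
        }
        return 0;
    }

    int main(void)
    {
        struct chunk sg[] = { { 0x1000, 4096 }, { 0x8000, 8192 }, { 0, 4096 } };

        printf("iommu_map: %d\n", iommu_map(0x10000, sg, 3));
        return 0;
    }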
122 static void etnaviv_iommu_remove_mapping(struct etnaviv_iommu_context *context,
127 etnaviv_iommu_unmap(context, mapping->vram_node.start,
132 static int etnaviv_iommu_find_iova(struct etnaviv_iommu_context *context,
139 lockdep_assert_held(&context->lock);
147 ret = drm_mm_insert_node_in_range(&context->mm, node,
153 drm_mm_scan_init(&scan, &context->mm, size, 0, 0, mode);
157 list_for_each_entry(free, &context->mappings, mmu_node) {
199 etnaviv_iommu_remove_mapping(context, m);
200 etnaviv_iommu_context_put(m->context);
201 m->context = NULL;
217 static int etnaviv_iommu_insert_exact(struct etnaviv_iommu_context *context,
220 return drm_mm_insert_node_in_range(&context->mm, node, size, 0, 0, va,
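etnaviv_iommu_find_iova() first tries a plain drm_mm allocation and, when the address space is exhausted, runs a drm_mm eviction scan over context->mappings, tearing down idle victims (removing the mapping and dropping its context reference, lines 199-201 above) before retrying; etnaviv_iommu_insert_exact() instead requests one specific address. The sketch below keeps only the try/evict/retry control flow: a page bitmap stands in for drm_mm and use == 0 marks a mapping as evictable. All names are illustrative.

    /* Sketch of the try/evict/retry flow in etnaviv_iommu_find_iova().
     * A page bitmap stands in for drm_mm; the kernel instead runs a
     * drm_mm eviction scan over context->mappings. */
    #include <stdio.h>
    #include <string.h>
    #include <errno.h>

    #define SPACE 16    /* whole address space, in pages */

    struct mapping { int start, size, use; };

    static unsigned char page_busy[SPACE];
    static struct mapping maps[] = { { 0, 8, 0 }, { 12, 4, 1 } };
    static int nmaps = 2;

    static int try_alloc(int size, int *start)
    {
        for (int s = 0; s + size <= SPACE; s++) {
            int ok = 1;

            for (int i = s; i < s + size; i++)
                ok &= !page_busy[i];
            if (ok) {
                *start = s;
                memset(page_busy + s, 1, size);
                return 0;
            }
        }
        return -ENOSPC;
    }

    static int find_iova(int size, int *start)
    {
        if (!try_alloc(size, start))
            return 0;

        /* Address space full: evict idle mappings, then retry. */
        for (int i = 0; i < nmaps; i++) {
            if (maps[i].use == 0) {
                printf("evict [%d, +%d)\n", maps[i].start, maps[i].size);
                memset(page_busy + maps[i].start, 0, maps[i].size);
                maps[i--] = maps[--nmaps];
            }
        }
        return try_alloc(size, start);
    }

    int main(void)
    {
        int start;

        for (int i = 0; i < nmaps; i++)
            memset(page_busy + maps[i].start, 1, maps[i].size);
        if (!find_iova(10, &start))
            printf("allocated 10 pages at %d\n", start);
        return 0;
    }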
224 int etnaviv_iommu_map_gem(struct etnaviv_iommu_context *context,
234 mutex_lock(&context->lock);
237 if (context->global->version == ETNAVIV_IOMMU_V1 &&
244 list_add_tail(&mapping->mmu_node, &context->mappings);
253 ret = etnaviv_iommu_insert_exact(context, node,
256 ret = etnaviv_iommu_find_iova(context, node,
262 ret = etnaviv_iommu_map(context, node->start, sgt, etnaviv_obj->base.size,
270 list_add_tail(&mapping->mmu_node, &context->mappings);
271 context->flush_seq++;
273 mutex_unlock(&context->lock);
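etnaviv_iommu_map_gem() does all of its work under context->lock: allocate an IOVA (exact when the caller supplies an address, searched otherwise), write the page tables, link the mapping into context->mappings, and bump context->flush_seq so users of the context know a TLB flush is due. (On MMUv1, line 237, a contiguous buffer can bypass the MMU and use its physical address directly.) A sketch of the lock-protected publish pattern, with pthreads standing in for the kernel mutex and an invented maybe_flush() consumer:

    /* Sketch of the publish pattern in etnaviv_iommu_map_gem(): mutate
     * the mapping list and bump a flush sequence counter under one
     * lock, so consumers can cheaply detect stale TLBs. pthreads stand
     * in for the kernel mutex; maybe_flush() is invented. */
    #include <pthread.h>
    #include <stdio.h>

    struct ctx {
        pthread_mutex_t lock;
        unsigned int flush_seq;
        unsigned int nmappings;
    };

    static void map_gem(struct ctx *c)
    {
        pthread_mutex_lock(&c->lock);
        /* ... allocate an IOVA (exact if the caller pinned an address,
         * searched otherwise) and write the page tables ... */
        c->nmappings++;
        c->flush_seq++;    /* the page tables changed */
        pthread_mutex_unlock(&c->lock);
    }

    static void maybe_flush(struct ctx *c, unsigned int *seen)
    {
        pthread_mutex_lock(&c->lock);
        if (c->flush_seq != *seen) {
            printf("TLB flush needed (seq %u -> %u)\n", *seen, c->flush_seq);
            *seen = c->flush_seq;
        }
        pthread_mutex_unlock(&c->lock);
    }

    int main(void)
    {
        struct ctx c = { PTHREAD_MUTEX_INITIALIZER, 0, 0 };
        unsigned int seen = 0;

        map_gem(&c);
        maybe_flush(&c, &seen);    /* flushes once */
        maybe_flush(&c, &seen);    /* then a no-op */
        return 0;
    }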
278 void etnaviv_iommu_unmap_gem(struct etnaviv_iommu_context *context,
283 mutex_lock(&context->lock);
286 if (!mapping->context) {
287 mutex_unlock(&context->lock);
292 if (mapping->vram_node.mm == &context->mm)
293 etnaviv_iommu_remove_mapping(context, mapping);
296 context->flush_seq++;
297 mutex_unlock(&context->lock);
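etnaviv_iommu_unmap_gem() bails out early when mapping->context is NULL, i.e. when the mapping was already reaped by the eviction path shown above, which makes teardown safe to race with eviction. A small sketch of that guard; the structs are invented, not the kernel's:

    /* Sketch of the reaped-mapping guard in etnaviv_iommu_unmap_gem():
     * a mapping whose context pointer was cleared by the eviction path
     * is skipped under the lock. The structs are invented. */
    #include <pthread.h>
    #include <stdio.h>
    #include <stddef.h>

    struct ctx { pthread_mutex_t lock; };
    struct mapping { struct ctx *context; };

    static void unmap_gem(struct ctx *c, struct mapping *m)
    {
        pthread_mutex_lock(&c->lock);
        if (!m->context) {    /* already reaped by eviction */
            pthread_mutex_unlock(&c->lock);
            return;
        }
        printf("unmapping\n");
        m->context = NULL;
        pthread_mutex_unlock(&c->lock);
    }

    int main(void)
    {
        struct ctx c = { PTHREAD_MUTEX_INITIALIZER };
        struct mapping m = { &c };

        unmap_gem(&c, &m);    /* unmaps */
        unmap_gem(&c, &m);    /* safe no-op */
        return 0;
    }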
302 struct etnaviv_iommu_context *context =
305 etnaviv_cmdbuf_suballoc_unmap(context, &context->cmdbuf_mapping);
307 context->global->ops->free(context);
309 void etnaviv_iommu_context_put(struct etnaviv_iommu_context *context)
311 kref_put(&context->refcount, etnaviv_iommu_context_free);
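The context itself is reference-counted: etnaviv_iommu_context_put() is a kref_put() whose release callback, etnaviv_iommu_context_free(), unmaps the command-buffer suballocator and then hands off to the per-version ops->free. A user-space sketch of the kref pattern, using C11 atomics in place of struct kref:

    /* User-space sketch of the kref pattern behind
     * etnaviv_iommu_context_put(): the last put runs a release callback
     * that frees version-specific state. C11 atomics stand in for
     * struct kref. */
    #include <stdatomic.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct context {
        atomic_int refcount;
        void (*free)(struct context *ctx);    /* like ops->free */
    };

    static void context_get(struct context *ctx)
    {
        atomic_fetch_add(&ctx->refcount, 1);
    }

    static void context_put(struct context *ctx)
    {
        /* The release callback runs exactly once, at count zero. */
        if (atomic_fetch_sub(&ctx->refcount, 1) == 1) {
            printf("last reference gone\n");
            ctx->free(ctx);
        }
    }

    static void v2_free(struct context *ctx)
    {
        free(ctx);
    }

    int main(void)
    {
        struct context *ctx = malloc(sizeof(*ctx));

        atomic_init(&ctx->refcount, 1);
        ctx->free = v2_free;

        context_get(ctx);    /* a second user appears */
        context_put(ctx);    /* still referenced */
        context_put(ctx);    /* frees here */
        return 0;
    }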
351 struct etnaviv_iommu_context *context)
353 context->global->ops->restore(gpu, context);
356 int etnaviv_iommu_get_suballoc_va(struct etnaviv_iommu_context *context,
361 mutex_lock(&context->lock);
365 mutex_unlock(&context->lock);
375 if (context->global->version == ETNAVIV_IOMMU_V1) {
381 ret = etnaviv_iommu_find_iova(context, node, size);
383 mutex_unlock(&context->lock);
388 ret = etnaviv_context_map(context, node->start, paddr, size,
392 mutex_unlock(&context->lock);
396 context->flush_seq++;
399 list_add_tail(&mapping->mmu_node, &context->mappings);
402 mutex_unlock(&context->lock);
407 void etnaviv_iommu_put_suballoc_va(struct etnaviv_iommu_context *context,
412 mutex_lock(&context->lock);
415 if (mapping->use > 0 || context->global->version == ETNAVIV_IOMMU_V1) {
416 mutex_unlock(&context->lock);
420 etnaviv_context_unmap(context, node->start, node->size);
422 mutex_unlock(&context->lock);
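etnaviv_iommu_get_suballoc_va() maps the command-buffer suballocator into the context on first use and hands the same VA to later callers via a use count; etnaviv_iommu_put_suballoc_va() unmaps only when that count reaches zero, and never on MMUv1 (line 415). A single-threaded sketch of the use-counted lazy mapping (the kernel does this under context->lock); the address constant is made up:

    /* Single-threaded sketch of the use-counted lazy mapping behind
     * etnaviv_iommu_get/put_suballoc_va(); the kernel does the same
     * under context->lock, and the address constant is made up. */
    #include <stdio.h>

    struct suballoc_map {
        unsigned long iova;    /* 0 means "not mapped yet" */
        unsigned int use;
    };

    static unsigned long get_suballoc_va(struct suballoc_map *m)
    {
        if (m->use++ == 0) {
            m->iova = 0x40000000UL;    /* pretend we mapped it here */
            printf("mapped at 0x%lx\n", m->iova);
        }
        return m->iova;
    }

    static void put_suballoc_va(struct suballoc_map *m)
    {
        if (--m->use == 0) {
            printf("unmapping 0x%lx\n", m->iova);
            m->iova = 0;
        }
    }

    int main(void)
    {
        struct suballoc_map m = { 0, 0 };

        get_suballoc_va(&m);    /* first user maps */
        get_suballoc_va(&m);    /* second user reuses */
        put_suballoc_va(&m);    /* still in use */
        put_suballoc_va(&m);    /* last user unmaps */
        return 0;
    }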
425 size_t etnaviv_iommu_dump_size(struct etnaviv_iommu_context *context)
427 return context->global->ops->dump_size(context);
430 void etnaviv_iommu_dump(struct etnaviv_iommu_context *context, void *buf)
432 context->global->ops->dump(context, buf);
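restore, dump_size and dump, like map, unmap and free earlier in the listing, dispatch through context->global->ops, a per-MMU-version function table that lets MMUv1 and MMUv2 contexts share all of the logic above. A sketch of that dispatch under invented names, one ops struct per version, selected when the context is created:

    /* Sketch of per-version dispatch through an ops table, as done by
     * context->global->ops. Names are illustrative, not the kernel's. */
    #include <stdio.h>
    #include <stddef.h>

    struct context;

    struct iommu_ops {
        void   (*restore)(struct context *ctx);
        size_t (*dump_size)(struct context *ctx);
    };

    struct context {
        const struct iommu_ops *ops;
    };

    static void v1_restore(struct context *ctx) { printf("v1 restore\n"); }
    static size_t v1_dump_size(struct context *ctx) { return 4096; }

    static void v2_restore(struct context *ctx) { printf("v2 restore\n"); }
    static size_t v2_dump_size(struct context *ctx) { return 16384; }

    static const struct iommu_ops v1_ops = { v1_restore, v1_dump_size };
    static const struct iommu_ops v2_ops = { v2_restore, v2_dump_size };

    int main(void)
    {
        struct context c1 = { &v1_ops }, c2 = { &v2_ops };

        /* Same call sites, version-specific behaviour. */
        c1.ops->restore(&c1);
        c2.ops->restore(&c2);
        printf("dump sizes: %zu vs %zu\n",
               c1.ops->dump_size(&c1), c2.ops->dump_size(&c2));
        return 0;
    }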