Lines Matching refs:mapping

7 #include <linux/dma-mapping.h>
229 struct etnaviv_vram_mapping *mapping;
231 list_for_each_entry(mapping, &obj->vram_list, obj_node) {
232 if (mapping->context == context)
233 return mapping;
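
The matches at 229-233 are the body of the per-object lookup helper, etnaviv_gem_get_vram_mapping(), which is also called at 261 and 294 below: it walks the object's vram_list and returns the mapping bound to the given MMU context. A minimal sketch of that helper, filled in from the matched lines; the static qualifier and the trailing NULL return are assumptions about the unmatched lines:

static struct etnaviv_vram_mapping *
etnaviv_gem_get_vram_mapping(struct etnaviv_gem_object *obj,
                             struct etnaviv_iommu_context *context)
{
        struct etnaviv_vram_mapping *mapping;

        /* Walk the object's per-context mappings; a NULL context matches
         * mappings whose context has already been dropped (see the re-use
         * path at 294). */
        list_for_each_entry(mapping, &obj->vram_list, obj_node) {
                if (mapping->context == context)
                        return mapping;
        }

        return NULL;
}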
239 void etnaviv_gem_mapping_unreference(struct etnaviv_vram_mapping *mapping)
241 struct etnaviv_gem_object *etnaviv_obj = mapping->object;
244 WARN_ON(mapping->use == 0);
245 mapping->use -= 1;
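
Lines 239-245 are the release side: dropping a mapping reference only decrements the use count, it never frees the mapping here (teardown happens in the free path at 520-541 below). A sketch of the whole function, assuming the usual etnaviv object lock around ->use and a final drm_gem_object_put() balancing the reference taken in etnaviv_gem_mapping_get(); those calls are not among the matched lines:

void etnaviv_gem_mapping_unreference(struct etnaviv_vram_mapping *mapping)
{
        struct etnaviv_gem_object *etnaviv_obj = mapping->object;

        mutex_lock(&etnaviv_obj->lock);         /* assumed: serializes ->use */
        WARN_ON(mapping->use == 0);             /* unbalanced put */
        mapping->use -= 1;
        mutex_unlock(&etnaviv_obj->lock);

        /* assumed: drop the GEM object reference held for this mapping */
        drm_gem_object_put(&etnaviv_obj->base);
}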
256 struct etnaviv_vram_mapping *mapping;
261 mapping = etnaviv_gem_get_vram_mapping(etnaviv_obj, mmu_context);
262 if (mapping) {
267 * the MMU owns this mapping to close this race.
269 if (mapping->use == 0) {
271 if (mapping->context == mmu_context)
272 mapping->use += 1;
274 mapping = NULL;
276 if (mapping)
279 mapping->use += 1;
291 * See if we have a reaped vram mapping we can re-use before
292 * allocating a fresh mapping.
294 mapping = etnaviv_gem_get_vram_mapping(etnaviv_obj, NULL);
295 if (!mapping) {
296 mapping = kzalloc(sizeof(*mapping), GFP_KERNEL);
297 if (!mapping) {
302 INIT_LIST_HEAD(&mapping->scan_node);
303 mapping->object = etnaviv_obj;
305 list_del(&mapping->obj_node);
308 mapping->context = etnaviv_iommu_context_get(mmu_context);
309 mapping->use = 1;
313 mapping, va);
316 kfree(mapping);
318 list_add_tail(&mapping->obj_node, &etnaviv_obj->vram_list);
329 return mapping;
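
The block from 256 to 329 is the get path, and the matched lines outline its three steps: look up an existing mapping for this MMU context (261), close the race with the MMU reaping an idle mapping by re-checking ownership before bumping the use count (267-279), and otherwise recycle a reaped mapping or allocate a fresh one, take a context reference, and map it into the IOMMU (291-318). A condensed sketch of how those pieces fit together; the lock names, the out label, the memory_base argument to etnaviv_iommu_map_gem() and the error handling are assumptions filled in around the matched lines, and the page-pinning step is omitted:

struct etnaviv_vram_mapping *etnaviv_gem_mapping_get(
        struct drm_gem_object *obj, struct etnaviv_iommu_context *mmu_context,
        u64 va)
{
        struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj);
        struct etnaviv_vram_mapping *mapping;
        int ret = 0;

        mutex_lock(&etnaviv_obj->lock);                 /* assumed lock */

        mapping = etnaviv_gem_get_vram_mapping(etnaviv_obj, mmu_context);
        if (mapping) {
                if (mapping->use == 0) {
                        /* Idle mapping: the MMU may reap it concurrently,
                         * so re-check under the MMU lock that this context
                         * still owns it before taking a reference (the race
                         * named in the comment at 267). */
                        mutex_lock(&mmu_context->lock);
                        if (mapping->context == mmu_context)
                                mapping->use += 1;
                        else
                                mapping = NULL;
                        mutex_unlock(&mmu_context->lock);
                        if (mapping)
                                goto out;
                } else {
                        mapping->use += 1;
                        goto out;
                }
        }

        /* No live mapping for this context: re-use a reaped one
         * (context == NULL) before allocating a fresh structure. */
        mapping = etnaviv_gem_get_vram_mapping(etnaviv_obj, NULL);
        if (!mapping) {
                mapping = kzalloc(sizeof(*mapping), GFP_KERNEL);
                if (!mapping) {
                        ret = -ENOMEM;
                        goto out;
                }
                INIT_LIST_HEAD(&mapping->scan_node);
                mapping->object = etnaviv_obj;
        } else {
                list_del(&mapping->obj_node);
        }

        mapping->context = etnaviv_iommu_context_get(mmu_context);
        mapping->use = 1;

        ret = etnaviv_iommu_map_gem(mmu_context, etnaviv_obj,
                                    mmu_context->global->memory_base,
                                    mapping, va);
        if (ret < 0) {
                etnaviv_iommu_context_put(mmu_context); /* undo the get above */
                kfree(mapping);
        } else {
                list_add_tail(&mapping->obj_node, &etnaviv_obj->vram_list);
        }

out:
        mutex_unlock(&etnaviv_obj->lock);

        if (ret)
                return ERR_PTR(ret);

        drm_gem_object_get(obj);        /* assumed: pin the BO while mapped */
        return mapping;
}

The key invariant visible in the matches: a mapping with use == 0 stays on vram_list so it can be recycled, but only the MMU side decides whether it still belongs to a context, which is why the zero-use case re-checks mapping->context before incrementing.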
520 struct etnaviv_vram_mapping *mapping, *tmp;
529 list_for_each_entry_safe(mapping, tmp, &etnaviv_obj->vram_list,
531 struct etnaviv_iommu_context *context = mapping->context;
533 WARN_ON(mapping->use);
536 etnaviv_iommu_unmap_gem(context, mapping);
540 list_del(&mapping->obj_node);
541 kfree(mapping);
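
Finally, 520-541 are the object free path: by the time the GEM object is destroyed every mapping must be idle, and each one is unmapped from its IOMMU context, unlinked and freed. A sketch of that loop; the NULL check on context and the etnaviv_iommu_context_put() call are assumptions based on the get/put pairing above:

        struct etnaviv_vram_mapping *mapping, *tmp;

        list_for_each_entry_safe(mapping, tmp, &etnaviv_obj->vram_list,
                                 obj_node) {
                struct etnaviv_iommu_context *context = mapping->context;

                /* All references should have been dropped before free. */
                WARN_ON(mapping->use);

                if (context) {
                        /* Tear down the IOMMU-side mapping and release the
                         * context reference taken in the get path. */
                        etnaviv_iommu_unmap_gem(context, mapping);
                        etnaviv_iommu_context_put(context);
                }

                list_del(&mapping->obj_node);
                kfree(mapping);
        }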