Lines Matching defs:renderer
136 void (*destroy)(struct vn_renderer *renderer,
139 VkResult (*submit)(struct vn_renderer *renderer,
146 VkResult (*wait)(struct vn_renderer *renderer,
151 struct vn_renderer_shmem *(*create)(struct vn_renderer *renderer,
153 void (*destroy)(struct vn_renderer *renderer,
159 struct vn_renderer *renderer,
166 VkResult (*create_from_dma_buf)(struct vn_renderer *renderer,
172 bool (*destroy)(struct vn_renderer *renderer, struct vn_renderer_bo *bo);
174 int (*export_dma_buf)(struct vn_renderer *renderer,
178 void *(*map)(struct vn_renderer *renderer, struct vn_renderer_bo *bo);
180 void (*flush)(struct vn_renderer *renderer,
184 void (*invalidate)(struct vn_renderer *renderer,
196 VkResult (*create)(struct vn_renderer *renderer,
201 VkResult (*create_from_syncobj)(struct vn_renderer *renderer,
205 void (*destroy)(struct vn_renderer *renderer,
208 int (*export_syncobj)(struct vn_renderer *renderer,
213 VkResult (*reset)(struct vn_renderer *renderer,
218 VkResult (*read)(struct vn_renderer *renderer,
223 VkResult (*write)(struct vn_renderer *renderer,
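The matches above (file lines 136-223) come from the backend function-pointer tables that a renderer implementation fills in: the core ops (destroy/submit/wait), plus shmem, bo, and sync ops. The full parameter lists are truncated in this listing, so the following is only a minimal, compilable sketch of the dispatch pattern they imply; every `sketch_*` name and trimmed signature is illustrative, not the real definition, and the shmem/bo/sync tables are only hinted at in a comment.

#include <stdio.h>

/* Illustrative stand-in for the renderer; the real struct carries much
 * more state and three more ops tables of the same shape. */
struct sketch_renderer;

struct sketch_renderer_ops {
   void (*destroy)(struct sketch_renderer *renderer);
   int (*submit)(struct sketch_renderer *renderer, const void *submit);
   int (*wait)(struct sketch_renderer *renderer, const void *wait);
};

struct sketch_renderer {
   struct sketch_renderer_ops ops;
   /* shmem_ops, bo_ops and sync_ops would follow the same pattern. */
};

/* A no-op backend filling in the table, the way a real backend would. */
static void
noop_destroy(struct sketch_renderer *renderer)
{
   (void)renderer;
}

static int
noop_submit(struct sketch_renderer *renderer, const void *submit)
{
   (void)renderer;
   (void)submit;
   return 0; /* plays the role of VK_SUCCESS */
}

static int
noop_wait(struct sketch_renderer *renderer, const void *wait)
{
   (void)renderer;
   (void)wait;
   return 0;
}

/* Thin wrapper that just dispatches through the table, mirroring the
 * vn_renderer_submit/vn_renderer_wait wrappers further down the listing. */
static inline int
sketch_renderer_submit(struct sketch_renderer *renderer, const void *submit)
{
   return renderer->ops.submit(renderer, submit);
}

int
main(void)
{
   struct sketch_renderer renderer = {
      .ops = { .destroy = noop_destroy,
               .submit = noop_submit,
               .wait = noop_wait },
   };
   printf("submit -> %d\n", sketch_renderer_submit(&renderer, NULL));
   renderer.ops.destroy(&renderer);
   return 0;
}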
239 struct vn_renderer **renderer);
244 struct vn_renderer **renderer);
249 struct vn_renderer **renderer)
252 VkResult result = vn_renderer_create_vtest(instance, alloc, renderer);
257 return vn_renderer_create_virtgpu(instance, alloc, renderer);
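The matches at file lines 239-257 are the constructor path: vn_renderer_create can try the vtest backend and otherwise returns the result of vn_renderer_create_virtgpu. The condition gating the vtest attempt is not among the matched lines, so the sketch below uses a hypothetical environment check (SKETCH_USE_VTEST) purely to show the shape of the fallback; the real decision logic is not shown here.

#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>

/* Hypothetical stand-in for the renderer; 0 plays the role of VK_SUCCESS. */
struct sketch_renderer {
   const char *name;
};

static int
sketch_renderer_create_vtest(struct sketch_renderer *renderer)
{
   /* Pretend the vtest transport is unavailable. */
   (void)renderer;
   return -1;
}

static int
sketch_renderer_create_virtgpu(struct sketch_renderer *renderer)
{
   renderer->name = "virtgpu";
   return 0;
}

/* Try the test backend when asked for; fall through to the default
 * backend otherwise or when it fails. The "use_vtest" gate is an
 * assumption made only for this illustration. */
static int
sketch_renderer_create(struct sketch_renderer *renderer)
{
   const bool use_vtest = getenv("SKETCH_USE_VTEST") != NULL;

   if (use_vtest) {
      if (sketch_renderer_create_vtest(renderer) == 0)
         return 0;
   }
   return sketch_renderer_create_virtgpu(renderer);
}

int
main(void)
{
   struct sketch_renderer renderer = { 0 };
   if (sketch_renderer_create(&renderer) == 0)
      printf("created %s renderer\n", renderer.name);
   return 0;
}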
261 vn_renderer_destroy(struct vn_renderer *renderer,
264 renderer->ops.destroy(renderer, alloc);
268 vn_renderer_submit(struct vn_renderer *renderer,
271 return renderer->ops.submit(renderer, submit);
275 vn_renderer_wait(struct vn_renderer *renderer,
278 return renderer->ops.wait(renderer, wait);
282 vn_renderer_shmem_create(struct vn_renderer *renderer, size_t size)
286 renderer->shmem_ops.create(renderer, size);
298 vn_renderer_shmem_ref(struct vn_renderer *renderer,
306 vn_renderer_shmem_unref(struct vn_renderer *renderer,
310 renderer->shmem_ops.destroy(renderer, shmem);
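vn_renderer_shmem_ref/vn_renderer_shmem_unref (file lines 298-310) put reference counting around the shmem objects, with the last unref being what actually calls shmem_ops.destroy. The refcount field itself is not visible in the matched lines, so the sketch below assumes an atomic counter on a hypothetical sketch_shmem type; it demonstrates the destroy-on-last-unref pattern, not the real struct layout.

#include <stdatomic.h>
#include <stdio.h>
#include <stdlib.h>

/* Hypothetical refcounted object standing in for vn_renderer_shmem. */
struct sketch_shmem {
   atomic_int refcount;
   size_t size;
};

static struct sketch_shmem *
sketch_shmem_create(size_t size)
{
   struct sketch_shmem *shmem = calloc(1, sizeof(*shmem));
   if (!shmem)
      return NULL;
   atomic_store(&shmem->refcount, 1);
   shmem->size = size;
   return shmem;
}

static struct sketch_shmem *
sketch_shmem_ref(struct sketch_shmem *shmem)
{
   atomic_fetch_add_explicit(&shmem->refcount, 1, memory_order_relaxed);
   return shmem;
}

static void
sketch_shmem_unref(struct sketch_shmem *shmem)
{
   /* Only the caller that drops the count to zero destroys the object;
    * this is where shmem_ops.destroy(renderer, shmem) would run. */
   if (atomic_fetch_sub_explicit(&shmem->refcount, 1,
                                 memory_order_acq_rel) == 1)
      free(shmem);
}

int
main(void)
{
   struct sketch_shmem *shmem = sketch_shmem_create(4096);
   sketch_shmem_ref(shmem);   /* second owner */
   sketch_shmem_unref(shmem); /* still alive */
   sketch_shmem_unref(shmem); /* last reference: destroyed here */
   printf("done\n");
   return 0;
}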
315 struct vn_renderer *renderer,
323 VkResult result = renderer->bo_ops.create_from_device_memory(
324 renderer, size, mem_id, flags, external_handles, &bo);
337 vn_renderer_bo_create_from_dma_buf(struct vn_renderer *renderer,
345 renderer->bo_ops.create_from_dma_buf(renderer, size, fd, flags, &bo);
358 vn_renderer_bo_ref(struct vn_renderer *renderer, struct vn_renderer_bo *bo)
365 vn_renderer_bo_unref(struct vn_renderer *renderer, struct vn_renderer_bo *bo)
368 return renderer->bo_ops.destroy(renderer, bo);
373 vn_renderer_bo_export_dma_buf(struct vn_renderer *renderer,
376 return renderer->bo_ops.export_dma_buf(renderer, bo);
380 vn_renderer_bo_map(struct vn_renderer *renderer, struct vn_renderer_bo *bo)
382 return renderer->bo_ops.map(renderer, bo);
386 vn_renderer_bo_flush(struct vn_renderer *renderer,
391 renderer->bo_ops.flush(renderer, bo, offset, end);
395 vn_renderer_bo_invalidate(struct vn_renderer *renderer,
400 renderer->bo_ops.invalidate(renderer, bo, offset, size);
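The bo wrappers (file lines 315-400) cover the buffer-object lifecycle: creation from a device memory id or an imported dma-buf, ref/unref, export back to a dma-buf, and CPU access through map with explicit flush/invalidate of the touched range. Note that bo_ops.destroy returns a bool, so vn_renderer_bo_unref can report whether the underlying bo was really destroyed. The sketch below models that lifecycle on a plain heap buffer under a hypothetical sketch_bo type; the flush/invalidate bodies are intentionally empty placeholders for where a real backend would act.

#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Hypothetical stand-in for vn_renderer_bo: refcount, size, lazy mapping. */
struct sketch_bo {
   int refcount;
   size_t size;
   void *map;
};

static struct sketch_bo *
sketch_bo_create(size_t size)
{
   struct sketch_bo *bo = calloc(1, sizeof(*bo));
   if (!bo)
      return NULL;
   bo->refcount = 1;
   bo->size = size;
   return bo;
}

/* Map lazily and cache the pointer, as a real bo map path typically would. */
static void *
sketch_bo_map(struct sketch_bo *bo)
{
   if (!bo->map)
      bo->map = malloc(bo->size);
   return bo->map;
}

/* For a non-coherent mapping, a written range must be flushed ... */
static void
sketch_bo_flush(struct sketch_bo *bo, size_t offset, size_t size)
{
   (void)bo;
   (void)offset;
   (void)size;
}

/* ... and a range must be invalidated before the CPU reads it back. */
static void
sketch_bo_invalidate(struct sketch_bo *bo, size_t offset, size_t size)
{
   (void)bo;
   (void)offset;
   (void)size;
}

/* Returns true when the last reference was dropped and the bo destroyed,
 * mirroring the bool returned by bo_ops.destroy in the listing above. */
static bool
sketch_bo_unref(struct sketch_bo *bo)
{
   if (--bo->refcount > 0)
      return false;
   free(bo->map);
   free(bo);
   return true;
}

int
main(void)
{
   struct sketch_bo *bo = sketch_bo_create(4096);
   char *ptr = sketch_bo_map(bo);

   memcpy(ptr, "hello", 6);
   sketch_bo_flush(bo, 0, 6);      /* make the write visible */
   sketch_bo_invalidate(bo, 0, 6); /* before reading it back */
   printf("bo says: %s\n", ptr);
   printf("destroyed: %d\n", sketch_bo_unref(bo));
   return 0;
}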
404 vn_renderer_sync_create(struct vn_renderer *renderer,
409 return renderer->sync_ops.create(renderer, initial_val, flags, out_sync);
413 vn_renderer_sync_create_from_syncobj(struct vn_renderer *renderer,
418 return renderer->sync_ops.create_from_syncobj(renderer, fd, sync_file,
423 vn_renderer_sync_destroy(struct vn_renderer *renderer,
426 renderer->sync_ops.destroy(renderer, sync);
430 vn_renderer_sync_export_syncobj(struct vn_renderer *renderer,
434 return renderer->sync_ops.export_syncobj(renderer, sync, sync_file);
438 vn_renderer_sync_reset(struct vn_renderer *renderer,
442 return renderer->sync_ops.reset(renderer, sync, initial_val);
446 vn_renderer_sync_read(struct vn_renderer *renderer,
450 return renderer->sync_ops.read(renderer, sync, val);
454 vn_renderer_sync_write(struct vn_renderer *renderer,
458 return renderer->sync_ops.write(renderer, sync, val);
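The sync wrappers (file lines 404-458) expose value-based sync objects: created with an initial value, readable, writable, resettable, and convertible to and from drm syncobjs (optionally via a sync file fd). The sketch below models only the value semantics with a plain counter on a hypothetical sketch_sync type; the syncobj import/export paths need a real DRM device and are left out.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Hypothetical stand-in for vn_renderer_sync: a monotonically advancing
 * payload value, similar to what a timeline point would expose. */
struct sketch_sync {
   uint64_t value;
};

static struct sketch_sync *
sketch_sync_create(uint64_t initial_val)
{
   struct sketch_sync *sync = malloc(sizeof(*sync));
   if (sync)
      sync->value = initial_val;
   return sync;
}

static void
sketch_sync_reset(struct sketch_sync *sync, uint64_t initial_val)
{
   sync->value = initial_val;
}

static uint64_t
sketch_sync_read(const struct sketch_sync *sync)
{
   return sync->value;
}

static void
sketch_sync_write(struct sketch_sync *sync, uint64_t val)
{
   /* signal the point "val" */
   sync->value = val;
}

int
main(void)
{
   struct sketch_sync *sync = sketch_sync_create(0);

   sketch_sync_write(sync, 10);
   printf("sync value: %llu\n", (unsigned long long)sketch_sync_read(sync));
   sketch_sync_reset(sync, 0);
   free(sync);
   return 0;
}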