Lines matching defs:device — matches for the `device` symbol in the v3dv query-pool code: pool creation and destruction, result collection, query reset, and the performance-query perfmon helpers. Each numbered line carries its line number in the source file.
119 kperfmon_create(struct v3dv_device *device,
135 int ret = v3dv_ioctl(device->pdevice->render_fd,
146 kperfmon_destroy(struct v3dv_device *device,
159 int ret = v3dv_ioctl(device->pdevice->render_fd,
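The two ioctls above (lines 135 and 159) create and tear down a kernel performance monitor for one query. A minimal sketch of that pattern, assuming the driver's internal headers; the pool/query field names (perfmon.ncounters, perf.kperfmon_ids, ...) are illustrative guesses, while the request structs and ioctl numbers come from the kernel UAPI header drm/v3d_drm.h:

static VkResult
kperfmon_create_sketch(struct v3dv_device *device,
                       struct v3dv_query_pool *pool,
                       uint32_t query)
{
   /* One perfmon tracks up to DRM_V3D_MAX_PERF_COUNTERS counters. */
   struct drm_v3d_perfmon_create req = {
      .ncounters = pool->perfmon.ncounters,              /* assumed field */
   };
   memcpy(req.counters, pool->perfmon.counters,          /* assumed field */
          req.ncounters);

   int ret = v3dv_ioctl(device->pdevice->render_fd,
                        DRM_IOCTL_V3D_PERFMON_CREATE, &req);
   if (ret)
      return vk_error(device, VK_ERROR_INITIALIZATION_FAILED);

   /* The kernel returns the new perfmon id in req.id. */
   pool->queries[query].perf.kperfmon_ids[0] = req.id;   /* assumed field */
   return VK_SUCCESS;
}

static void
kperfmon_destroy_sketch(struct v3dv_device *device,
                        struct v3dv_query_pool *pool,
                        uint32_t query)
{
   struct drm_v3d_perfmon_destroy req = {
      .id = pool->queries[query].perf.kperfmon_ids[0],   /* assumed field */
   };
   v3dv_ioctl(device->pdevice->render_fd,
              DRM_IOCTL_V3D_PERFMON_DESTROY, &req);
}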
176 V3DV_FROM_HANDLE(v3dv_device, device, _device);
184 vk_object_zalloc(&device->vk, pAllocator, sizeof(*pool),
187 return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
196 pool->queries = vk_alloc2(&device->vk.alloc, pAllocator, pool_bytes, 8,
199 result = vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
211 pool->bo = v3dv_bo_alloc(device, bo_size, "query", true);
213 result = vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
216 if (!v3dv_bo_map(device, pool->bo, bo_size)) {
217 result = vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
260 result = vk_sync_create(&device->vk,
261 &device->pdevice->drm_syncobj_type, 0, 0,
282 vk_sync_destroy(&device->vk, pool->queries[j].perf.last_job_sync);
286 v3dv_bo_free(device, pool->bo);
288 vk_free2(&device->vk.alloc, pAllocator, pool->queries);
289 vk_object_free(&device->vk, pAllocator, pool);
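Lines 176-289 are the vkCreateQueryPool path: allocate the pool object, the per-query array and a results BO, create a per-query syncobj for performance pools (line 260), and unwind everything in reverse order on failure. A condensed sketch of that flow for an occlusion pool, assuming the driver's headers; pool_bytes, bo_size and the handle-cast helper are illustrative:

VkResult
sketch_CreateQueryPool(VkDevice _device,
                       const VkQueryPoolCreateInfo *pCreateInfo,
                       const VkAllocationCallbacks *pAllocator,
                       VkQueryPool *pQueryPool)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   VkResult result;

   struct v3dv_query_pool *pool =
      vk_object_zalloc(&device->vk, pAllocator, sizeof(*pool),
                       VK_OBJECT_TYPE_QUERY_POOL);
   if (!pool)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   const uint32_t pool_bytes =
      sizeof(pool->queries[0]) * pCreateInfo->queryCount;      /* assumed */
   pool->queries = vk_alloc2(&device->vk.alloc, pAllocator, pool_bytes, 8,
                             VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pool->queries) {
      result = vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
      goto fail_queries;
   }

   const uint32_t bo_size = 4096;                              /* assumed */
   pool->bo = v3dv_bo_alloc(device, bo_size, "query", true);
   if (!pool->bo) {
      result = vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
      goto fail_bo;
   }
   if (!v3dv_bo_map(device, pool->bo, bo_size)) {
      result = vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
      goto fail_map;
   }

   *pQueryPool = v3dv_query_pool_to_handle(pool);       /* assumed helper */
   return VK_SUCCESS;

fail_map:
   v3dv_bo_free(device, pool->bo);
fail_bo:
   vk_free2(&device->vk.alloc, pAllocator, pool->queries);
fail_queries:
   vk_object_free(&device->vk, pAllocator, pool);
   return result;
}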
299 V3DV_FROM_HANDLE(v3dv_device, device, _device);
306 v3dv_bo_free(device, pool->bo);
310 kperfmon_destroy(device, pool, i);
311 vk_sync_destroy(&device->vk, pool->queries[i].perf.last_job_sync);
316 vk_free2(&device->vk.alloc, pAllocator, pool->queries);
318 vk_object_free(&device->vk, pAllocator, pool);
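The destroy path (lines 299-318) mirrors this: performance pools first tear down their kernel perfmons and per-query syncobjs, then the BO, the query array and the pool object are freed. Sketch, with the query_count field name assumed:

void
sketch_DestroyQueryPool(VkDevice _device, VkQueryPool queryPool,
                        const VkAllocationCallbacks *pAllocator)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   V3DV_FROM_HANDLE(v3dv_query_pool, pool, queryPool);

   if (!pool)
      return;

   if (pool->bo)
      v3dv_bo_free(device, pool->bo);

   if (pool->query_type == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
      for (uint32_t i = 0; i < pool->query_count; i++) {   /* assumed field */
         kperfmon_destroy(device, pool, i);
         vk_sync_destroy(&device->vk, pool->queries[i].perf.last_job_sync);
      }
   }

   vk_free2(&device->vk.alloc, pAllocator, pool->queries);
   vk_object_free(&device->vk, pAllocator, pool);
}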
334 query_wait_available(struct v3dv_device *device,
345 mtx_lock(&device->query_mutex);
347 if (vk_device_is_lost(&device->vk)) {
352 int ret = cnd_timedwait(&device->query_ended,
353 &device->query_mutex,
356 mtx_unlock(&device->query_mutex);
357 result = vk_device_set_lost(&device->vk, "Query wait failed");
361 mtx_unlock(&device->query_mutex);
368 !v3dv_bo_wait(device, q->bo, 0xffffffffffffffffull))
369 return vk_device_set_lost(&device->vk, "Query BO wait failed: %m");
372 vk_sync_wait(&device->vk, q->perf.last_job_sync,
374 return vk_device_set_lost(&device->vk, "Query job wait failed");
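query_wait_available (lines 334-374) blocks under device->query_mutex on the query_ended condition variable, then waits on the result BO for occlusion queries or on the last job's syncobj for performance queries, turning any failure into a lost device. A sketch, assuming a per-query maybe_available flag and a fixed condvar timeout:

static VkResult
query_wait_available_sketch(struct v3dv_device *device,
                            struct v3dv_query *q,
                            VkQueryType query_type)
{
   mtx_lock(&device->query_mutex);
   while (!q->maybe_available) {                       /* assumed flag */
      if (vk_device_is_lost(&device->vk)) {
         mtx_unlock(&device->query_mutex);
         return VK_ERROR_DEVICE_LOST;
      }

      struct timespec timeout;
      timespec_get(&timeout, TIME_UTC);
      timeout.tv_sec += 2;                             /* assumed timeout */

      int ret = cnd_timedwait(&device->query_ended,
                              &device->query_mutex,
                              &timeout);
      if (ret != thrd_success) {
         mtx_unlock(&device->query_mutex);
         return vk_device_set_lost(&device->vk, "Query wait failed");
      }
   }
   mtx_unlock(&device->query_mutex);

   if (query_type == VK_QUERY_TYPE_OCCLUSION &&
       !v3dv_bo_wait(device, q->bo, 0xffffffffffffffffull))
      return vk_device_set_lost(&device->vk, "Query BO wait failed: %m");

   if (query_type == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR &&
       vk_sync_wait(&device->vk, q->perf.last_job_sync,
                    0, VK_SYNC_WAIT_COMPLETE, UINT64_MAX) != VK_SUCCESS)
      return vk_device_set_lost(&device->vk, "Query job wait failed");

   return VK_SUCCESS;
}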
380 write_occlusion_query_result(struct v3dv_device *device,
389 if (vk_device_is_lost(&device->vk))
401 write_timestamp_query_result(struct v3dv_device *device,
417 write_performance_query_result(struct v3dv_device *device,
436 int ret = v3dv_ioctl(device->pdevice->render_fd,
442 return vk_error(device, VK_ERROR_DEVICE_LOST);
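The ioctl at line 436 reads the accumulated counter values for one performance query. A sketch of that read, with the per-query kperfmon_ids bookkeeping assumed and the 32-bit result path omitted; the request struct and DRM_IOCTL_V3D_PERFMON_GET_VALUES come from drm/v3d_drm.h:

static VkResult
write_performance_query_result_sketch(struct v3dv_device *device,
                                      struct v3dv_query *q,
                                      uint64_t *values)
{
   /* values must have room for DRM_V3D_MAX_PERF_COUNTERS 64-bit slots. */
   struct drm_v3d_perfmon_get_values req = {
      .id = q->perf.kperfmon_ids[0],                    /* assumed field */
      .values_ptr = (uintptr_t)values,
   };

   int ret = v3dv_ioctl(device->pdevice->render_fd,
                        DRM_IOCTL_V3D_PERFMON_GET_VALUES, &req);
   if (ret)
      return vk_error(device, VK_ERROR_DEVICE_LOST);

   return VK_SUCCESS;
}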
453 query_check_available(struct v3dv_device *device,
461 !v3dv_bo_wait(device, q->bo, 0))
465 vk_sync_wait(&device->vk, q->perf.last_job_sync,
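query_check_available (lines 453-465) is the non-blocking counterpart: it polls the same fences with a zero timeout. Sketch, returning a plain bool and assuming the maybe_available flag:

static bool
query_check_available_sketch(struct v3dv_device *device,
                             struct v3dv_query *q,
                             VkQueryType query_type)
{
   if (!q->maybe_available)                            /* assumed flag */
      return false;

   if (query_type == VK_QUERY_TYPE_OCCLUSION &&
       !v3dv_bo_wait(device, q->bo, 0))
      return false;

   if (query_type == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR &&
       vk_sync_wait(&device->vk, q->perf.last_job_sync,
                    0, VK_SYNC_WAIT_COMPLETE, 0) != VK_SUCCESS)
      return false;

   return true;
}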
473 write_query_result(struct v3dv_device *device,
482 return write_occlusion_query_result(device, pool, query, do_64bit,
485 return write_timestamp_query_result(device, pool, query, do_64bit,
488 return write_performance_query_result(device, pool, query, do_64bit,
496 query_is_available(struct v3dv_device *device,
508 VkResult result = query_wait_available(device, q, pool->query_type);
516 VkResult result = query_check_available(device, q, pool->query_type);
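write_query_result (lines 473-488) and query_is_available (lines 496-516) are thin dispatchers: the first switches on pool->query_type, the second picks the waiting or polling helper depending on the caller's do_wait flag. The result-writer dispatch looks roughly like this (the trailing data/slot parameters are assumed):

static VkResult
write_query_result_sketch(struct v3dv_device *device,
                          struct v3dv_query_pool *pool,
                          uint32_t query, bool do_64bit,
                          void *data, uint32_t slot)
{
   switch (pool->query_type) {
   case VK_QUERY_TYPE_OCCLUSION:
      return write_occlusion_query_result(device, pool, query, do_64bit,
                                          data, slot);
   case VK_QUERY_TYPE_TIMESTAMP:
      return write_timestamp_query_result(device, pool, query, do_64bit,
                                          data, slot);
   case VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR:
      return write_performance_query_result(device, pool, query, do_64bit,
                                            data, slot);
   default:
      unreachable("Unsupported query type");
   }
}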
539 v3dv_get_query_pool_results(struct v3dv_device *device,
562 query_is_available(device, pool, i, do_wait, &available);
580 write_query_result(device, pool, i, do_64bit, data, slot);
605 V3DV_FROM_HANDLE(v3dv_device, device, _device);
608 return v3dv_get_query_pool_results(device, pool, firstQuery, queryCount,
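vkGetQueryPoolResults (lines 539-608) loops over the requested range, resolves availability (waiting when VK_QUERY_RESULT_WAIT_BIT is set), writes results that are available or allowed by VK_QUERY_RESULT_PARTIAL_BIT, and otherwise folds the miss into a VK_NOT_READY return, as the spec requires. A simplified sketch that omits the WITH_AVAILABILITY word and the per-query slot math:

VkResult
get_query_pool_results_sketch(struct v3dv_device *device,
                              struct v3dv_query_pool *pool,
                              uint32_t first, uint32_t count,
                              void *data, VkDeviceSize stride,
                              VkQueryResultFlags flags)
{
   const bool do_64bit = flags & VK_QUERY_RESULT_64_BIT;
   const bool do_wait = flags & VK_QUERY_RESULT_WAIT_BIT;
   VkResult result = VK_SUCCESS;

   for (uint32_t i = first; i < first + count; i++) {
      bool available = false;
      query_is_available(device, pool, i, do_wait, &available);

      const bool write = available || (flags & VK_QUERY_RESULT_PARTIAL_BIT);
      uint32_t slot = 0;                        /* assumed slot tracking */
      if (write)
         write_query_result(device, pool, i, do_64bit, data, slot);
      else if (result == VK_SUCCESS)
         result = VK_NOT_READY;                 /* per the Vulkan spec */

      data = (uint8_t *)data + stride;
   }

   return result;
}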
667 v3dv_reset_query_pools(struct v3dv_device *device,
672 mtx_lock(&device->query_mutex);
689 kperfmon_destroy(device, pool, i);
690 kperfmon_create(device, pool, i);
691 if (vk_sync_reset(&device->vk, q->perf.last_job_sync) != VK_SUCCESS)
699 mtx_unlock(&device->query_mutex);
708 V3DV_FROM_HANDLE(v3dv_device, device, _device);
711 v3dv_reset_query_pools(device, pool, firstQuery, queryCount);
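v3dv_reset_query_pools (lines 667-699) serializes against result collection with device->query_mutex; performance queries additionally recycle their kernel perfmon and reset the per-query syncobj. Sketch, with the maybe_available flag assumed:

void
reset_query_pools_sketch(struct v3dv_device *device,
                         struct v3dv_query_pool *pool,
                         uint32_t first, uint32_t count)
{
   mtx_lock(&device->query_mutex);

   for (uint32_t i = first; i < first + count; i++) {
      struct v3dv_query *q = &pool->queries[i];
      q->maybe_available = false;               /* assumed flag */

      if (pool->query_type == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
         kperfmon_destroy(device, pool, i);
         kperfmon_create(device, pool, i);
         if (vk_sync_reset(&device->vk, q->perf.last_job_sync) != VK_SUCCESS)
            fprintf(stderr, "Failed to reset query sync\n");
      }
   }

   mtx_unlock(&device->query_mutex);
}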
776 v3dv_ReleaseProfilingLockKHR(VkDevice device)
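The last match is the vkReleaseProfilingLockKHR entry point at line 776. Its body is not shown here; since the kernel perfmon interface does its own arbitration, the profiling-lock entry points on this stack can be simple stubs. A purely illustrative sketch (behavior assumed, not taken from the source):

VKAPI_ATTR void VKAPI_CALL
sketch_ReleaseProfilingLockKHR(VkDevice device)
{
   /* Assumption: nothing to release; perfmon lifetime is handled by the
    * per-query kperfmon_create/kperfmon_destroy ioctls shown above. */
}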