Lines Matching refs:device

99 struct anv_device *device = (struct anv_device *)data;
100 UNUSED struct anv_instance *instance = device->physical->instance;
178 get_device_extensions(const struct anv_physical_device *device,
182 (device->sync_syncobj_type.features & VK_SYNC_FEATURE_CPU_WAIT) != 0;
188 .KHR_8bit_storage = device->info.ver >= 8,
189 .KHR_16bit_storage = device->info.ver >= 8,
191 .KHR_buffer_device_address = device->has_a64_buffer_access,
209 .KHR_fragment_shading_rate = device->info.ver >= 11,
222 !anv_use_relocations(device) && device->perf &&
223 (device->perf->i915_perf_version >= 3 ||
225 device->use_call_secondary,
228 .KHR_ray_query = device->info.has_ray_tracing,
233 .KHR_shader_atomic_int64 = device->info.ver >= 9,
236 .KHR_shader_float16_int8 = device->info.ver >= 8,
237 .KHR_shader_float_controls = device->info.ver >= 8,
240 .KHR_shader_subgroup_extended_types = device->info.ver >= 8,
257 .EXT_border_color_swizzle = device->info.ver >= 8,
258 .EXT_buffer_device_address = device->has_a64_buffer_access,
259 .EXT_calibrated_timestamps = device->has_reg_timestamp,
261 .EXT_conditional_rendering = device->info.verx10 >= 75,
262 .EXT_conservative_rasterization = device->info.ver >= 9,
263 .EXT_custom_border_color = device->info.ver >= 8,
266 .EXT_descriptor_indexing = device->has_a64_buffer_access &&
267 device->has_bindless_images,
275 .EXT_fragment_shader_interlock = device->info.ver >= 9,
276 .EXT_global_priority = device->max_context_priority >=
278 .EXT_global_priority_query = device->max_context_priority >=
291 .EXT_memory_budget = (!device->info.has_local_mem ||
292 device->vram_mappable.available > 0) &&
293 device->sys.available,
299 .EXT_post_depth_coverage = device->info.ver >= 9,
307 .EXT_sampler_filter_minmax = device->info.ver >= 9,
311 .EXT_shader_atomic_float2 = device->info.ver >= 9,
314 .EXT_shader_stencil_export = device->info.ver >= 9,
331 .INTEL_performance_query = device->perf &&
332 device->perf->i915_perf_version >= 3,
333 .INTEL_shader_integer_functions2 = device->info.ver >= 8,
336 .NV_mesh_shader = device->info.has_mesh_shading &&
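
The get_device_extensions() matches above all follow one pattern: each VK_* extension is enabled by a predicate over the probed hardware info, either a generation check like info.ver >= 8 or a dedicated capability bit. A minimal sketch of that shape, with hypothetical stand-in types rather than the real anv/intel structs:

    #include <stdbool.h>

    /* Hypothetical stand-ins for intel_device_info and the extension table. */
    struct dev_info { int ver; bool has_ray_tracing; };
    struct ext_table { bool KHR_8bit_storage; bool KHR_ray_query; };

    static void
    get_extensions_sketch(const struct dev_info *info, struct ext_table *ext)
    {
       /* Gfx8+ gates most of the KHR storage/float extensions above. */
       ext->KHR_8bit_storage = info->ver >= 8;
       /* Ray query is tied to a dedicated hardware capability bit. */
       ext->KHR_ray_query = info->has_ray_tracing;
    }
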
343 anv_compute_sys_heap_size(struct anv_physical_device *device,
358 available_ram = MIN2(available_ram, device->gtt_size * 3 / 4);
360 if (available_ram > (2ull << 30) && !device->supports_48bit_addresses) {
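
Lines 358 and 360 carry the sizing logic of anv_compute_sys_heap_size(): cap the system heap at 3/4 of the GTT, then clamp it when the device lacks 48-bit addressing. A self-contained sketch of just that clamping, with plain parameters in place of the device struct (the body of the clamp is hidden by the search, so the 2 GiB cap is an assumption):

    #include <stdbool.h>
    #include <stdint.h>

    #define MIN2(a, b) ((a) < (b) ? (a) : (b))

    static uint64_t
    clamp_sys_heap(uint64_t available_ram, uint64_t gtt_size,
                   bool supports_48bit_addresses)
    {
       /* Never hand more than 3/4 of the GTT to the system heap. */
       available_ram = MIN2(available_ram, gtt_size * 3 / 4);

       /* Without 48-bit addresses, keep the heap under 2 GiB so every
        * allocation stays addressable (assumed cap). */
       if (available_ram > (2ull << 30) && !supports_48bit_addresses)
          available_ram = 2ull << 30;

       return available_ram;
    }
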
376 anv_init_meminfo(struct anv_physical_device *device, int fd)
378 const struct intel_device_info *devinfo = &device->info;
380 device->sys.region.memory_class = devinfo->mem.sram.mem_class;
381 device->sys.region.memory_instance = devinfo->mem.sram.mem_instance;
382 device->sys.size =
383 anv_compute_sys_heap_size(device, devinfo->mem.sram.mappable.size);
384 device->sys.available = devinfo->mem.sram.mappable.free;
386 device->vram_mappable.region.memory_class = devinfo->mem.vram.mem_class;
387 device->vram_mappable.region.memory_instance =
389 device->vram_mappable.size = devinfo->mem.vram.mappable.size;
390 device->vram_mappable.available = devinfo->mem.vram.mappable.free;
392 device->vram_non_mappable.region.memory_class =
394 device->vram_non_mappable.region.memory_instance =
396 device->vram_non_mappable.size = devinfo->mem.vram.unmappable.size;
397 device->vram_non_mappable.available = devinfo->mem.vram.unmappable.free;
403 anv_update_meminfo(struct anv_physical_device *device, int fd)
405 if (!intel_device_info_update_memory_info(&device->info, fd))
408 const struct intel_device_info *devinfo = &device->info;
409 device->sys.available = devinfo->mem.sram.mappable.free;
410 device->vram_mappable.available = devinfo->mem.vram.mappable.free;
411 device->vram_non_mappable.available = devinfo->mem.vram.unmappable.free;
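
anv_init_meminfo() copies the kernel-reported region sizes and free space into the driver's three accounting slots (sys, vram_mappable, vram_non_mappable), while anv_update_meminfo() refreshes only the .available fields. A sketch of that mirroring using hypothetical local types:

    #include <stdint.h>

    /* Hypothetical stand-ins for the devinfo/device memory bookkeeping. */
    struct region_info { uint64_t size, free; };
    struct mem_slot    { uint64_t size, available; };

    static void
    init_slot(struct mem_slot *slot, const struct region_info *region)
    {
       slot->size      = region->size;   /* fixed at init time */
       slot->available = region->free;   /* refreshed on update */
    }

    static void
    update_slot(struct mem_slot *slot, const struct region_info *region)
    {
       /* Only the free-space figure changes between queries. */
       slot->available = region->free;
    }
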
416 anv_physical_device_init_heaps(struct anv_physical_device *device, int fd)
418 VkResult result = anv_init_meminfo(device, fd);
422 assert(device->sys.size != 0);
424 if (anv_physical_device_has_vram(device)) {
430 device->memory.heap_count = 2;
431 device->memory.heaps[0] = (struct anv_memory_heap) {
432 /* If there is a vram_non_mappable, use that for the device only
435 .size = device->vram_non_mappable.size != 0 ?
436 device->vram_non_mappable.size : device->vram_mappable.size,
440 device->memory.heaps[1] = (struct anv_memory_heap) {
441 .size = device->sys.size,
448 if (device->vram_non_mappable.size > 0) {
449 device->memory.heap_count++;
450 device->memory.heaps[2] = (struct anv_memory_heap) {
451 .size = device->vram_mappable.size,
457 device->memory.type_count = 3;
458 device->memory.types[0] = (struct anv_memory_type) {
462 device->memory.types[1] = (struct anv_memory_type) {
468 device->memory.types[2] = (struct anv_memory_type) {
476 .heapIndex = device->vram_non_mappable.size > 0 ? 2 : 0,
478 } else if (device->info.has_llc) {
479 device->memory.heap_count = 1;
480 device->memory.heaps[0] = (struct anv_memory_heap) {
481 .size = device->sys.size,
489 device->memory.type_count = 1;
490 device->memory.types[0] = (struct anv_memory_type) {
498 device->memory.heap_count = 1;
499 device->memory.heaps[0] = (struct anv_memory_heap) {
500 .size = device->sys.size,
510 device->memory.type_count = 2;
511 device->memory.types[0] = (struct anv_memory_type) {
517 device->memory.types[1] = (struct anv_memory_type) {
525 device->memory.need_clflush = false;
526 for (unsigned i = 0; i < device->memory.type_count; i++) {
527 VkMemoryPropertyFlags props = device->memory.types[i].propertyFlags;
530 device->memory.need_clflush = true;
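
The tail of anv_physical_device_init_heaps() (lines 525-530) derives need_clflush by scanning the memory types it just published. The condition itself fell on non-matching lines; a plausible reconstruction flags any host-cached type that is not host-coherent, since such mappings require manual cache flushes:

    #include <stdbool.h>
    #include <vulkan/vulkan_core.h>

    static bool
    memory_types_need_clflush(const VkMemoryPropertyFlags *props, unsigned count)
    {
       for (unsigned i = 0; i < count; i++) {
          /* Host-cached but not host-coherent => CPU must flush caches
           * around mapped writes (assumed predicate). */
          if ((props[i] & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) &&
              !(props[i] & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT))
             return true;
       }
       return false;
    }
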
537 anv_physical_device_init_uuids(struct anv_physical_device *device)
542 return vk_errorf(device, VK_ERROR_INITIALIZATION_FAILED,
548 return vk_errorf(device, VK_ERROR_INITIALIZATION_FAILED,
552 memcpy(device->driver_build_sha1, build_id_data(note), 20);
559 * invalid. It needs both a driver build and the PCI ID of the device.
563 _mesa_sha1_update(&sha1_ctx, &device->info.pci_device_id,
564 sizeof(device->info.pci_device_id));
565 _mesa_sha1_update(&sha1_ctx, &device->always_use_bindless,
566 sizeof(device->always_use_bindless));
567 _mesa_sha1_update(&sha1_ctx, &device->has_a64_buffer_access,
568 sizeof(device->has_a64_buffer_access));
569 _mesa_sha1_update(&sha1_ctx, &device->has_bindless_images,
570 sizeof(device->has_bindless_images));
571 _mesa_sha1_update(&sha1_ctx, &device->has_bindless_samplers,
572 sizeof(device->has_bindless_samplers));
574 memcpy(device->pipeline_cache_uuid, sha1, VK_UUID_SIZE);
576 intel_uuid_compute_driver_id(device->driver_uuid, &device->info, VK_UUID_SIZE);
577 intel_uuid_compute_device_id(device->device_uuid, &device->info, VK_UUID_SIZE);
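
anv_physical_device_init_uuids() hashes the driver build SHA-1 together with the PCI device ID and the bindless/A64 capability booleans (lines 563-572), then truncates the digest to VK_UUID_SIZE for the pipeline-cache UUID, so any change to those inputs invalidates cached pipelines. A condensed sketch using Mesa's _mesa_sha1 helpers, with the field set abbreviated:

    #include <stdbool.h>
    #include <string.h>
    #include <stdint.h>
    #include "util/mesa-sha1.h"   /* Mesa-internal SHA-1 helpers */

    static void
    compute_cache_uuid_sketch(const void *pci_device_id, size_t id_size,
                              bool always_use_bindless,
                              uint8_t uuid_out[16])
    {
       struct mesa_sha1 ctx;
       unsigned char sha1[20];

       _mesa_sha1_init(&ctx);
       _mesa_sha1_update(&ctx, pci_device_id, id_size);
       /* The real code hashes several capability booleans this way. */
       _mesa_sha1_update(&ctx, &always_use_bindless,
                         sizeof(always_use_bindless));
       _mesa_sha1_final(&ctx, sha1);

       memcpy(uuid_out, sha1, 16);   /* VK_UUID_SIZE == 16 */
    }
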
583 anv_physical_device_init_disk_cache(struct anv_physical_device *device)
588 device->info.pci_device_id);
592 _mesa_sha1_format(timestamp, device->driver_build_sha1);
595 brw_get_compiler_config_value(device->compiler);
596 device->vk.disk_cache = disk_cache_create(renderer, timestamp, driver_flags);
601 anv_physical_device_free_disk_cache(struct anv_physical_device *device)
604 if (device->vk.disk_cache) {
605 disk_cache_destroy(device->vk.disk_cache);
606 device->vk.disk_cache = NULL;
609 assert(device->vk.disk_cache == NULL);
751 "Unable to open device %s: out of memory", path);
754 "Unable to open device %s: %m", path);
779 struct anv_physical_device *device =
780 vk_zalloc(&instance->vk.alloc, sizeof(*device), 8,
782 if (device == NULL) {
793 result = vk_physical_device_init(&device->vk, &instance->vk,
800 device->instance = instance;
802 assert(strlen(path) < ARRAY_SIZE(device->path));
803 snprintf(device->path, ARRAY_SIZE(device->path), "%s", path);
805 device->info = devinfo;
806 device->is_alpha = is_alpha;
808 device->cmd_parser_version = -1;
809 if (device->info.ver == 7) {
810 device->cmd_parser_version =
812 if (device->cmd_parser_version == -1) {
813 result = vk_errorf(device, VK_ERROR_INITIALIZATION_FAILED,
820 result = vk_errorf(device, VK_ERROR_INITIALIZATION_FAILED,
826 result = vk_errorf(device, VK_ERROR_INITIALIZATION_FAILED,
831 if (!device->info.has_llc &&
833 result = vk_errorf(device, VK_ERROR_INITIALIZATION_FAILED,
838 device->use_relocations = device->info.ver < 8 ||
839 device->info.platform == INTEL_PLATFORM_CHV;
841 if (!device->use_relocations &&
843 result = vk_errorf(device, VK_ERROR_INITIALIZATION_FAILED,
849 result = vk_errorf(device, VK_ERROR_INITIALIZATION_FAILED,
854 device->has_exec_async = anv_gem_get_param(fd, I915_PARAM_HAS_EXEC_ASYNC);
855 device->has_exec_capture = anv_gem_get_param(fd, I915_PARAM_HAS_EXEC_CAPTURE);
863 device->max_context_priority = INT_MIN;
867 device->max_context_priority = priorities[i];
870 device->gtt_size = device->info.gtt_size ? device->info.gtt_size :
871 device->info.aperture_bytes;
876 device->supports_48bit_addresses = (device->info.ver >= 8) &&
877 device->gtt_size > (4ULL << 30 /* GiB */);
880 memset(&device->vram_non_mappable, 0, sizeof(device->vram_non_mappable));
881 memset(&device->vram_mappable, 0, sizeof(device->vram_mappable));
882 memset(&device->sys, 0, sizeof(device->sys));
884 result = anv_physical_device_init_heaps(device, fd);
888 assert(device->supports_48bit_addresses == !device->use_relocations);
889 device->use_softpin = !device->use_relocations;
891 device->has_context_isolation =
894 device->has_exec_timeline =
897 device->has_exec_timeline = false;
901 device->sync_syncobj_type = vk_drm_syncobj_get_type(fd);
902 if (!device->has_exec_timeline)
903 device->sync_syncobj_type.features &= ~VK_SYNC_FEATURE_TIMELINE;
904 device->sync_types[st_idx++] = &device->sync_syncobj_type;
906 if (!(device->sync_syncobj_type.features & VK_SYNC_FEATURE_CPU_WAIT))
907 device->sync_types[st_idx++] = &anv_bo_sync_type;
909 if (!(device->sync_syncobj_type.features & VK_SYNC_FEATURE_TIMELINE)) {
910 device->sync_timeline_type = vk_sync_timeline_get_type(&anv_bo_sync_type);
911 device->sync_types[st_idx++] = &device->sync_timeline_type.sync;
914 device->sync_types[st_idx++] = NULL;
915 assert(st_idx <= ARRAY_SIZE(device->sync_types));
916 device->vk.supported_sync_types = device->sync_types;
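
The sync-type setup (lines 901-916) builds a NULL-terminated priority list: the DRM syncobj type first, the anv_bo_sync fallback when syncobj cannot CPU-wait, and an emulated timeline when the kernel lacks timeline syncobjs. A schematic of that construction with stand-in types:

    #include <assert.h>
    #include <stddef.h>

    /* Hypothetical stand-in for vk_sync_type and its feature bits. */
    struct sync_type { unsigned features; };
    #define FEATURE_CPU_WAIT (1u << 0)
    #define FEATURE_TIMELINE (1u << 1)

    static void
    build_sync_types(const struct sync_type *syncobj,
                     const struct sync_type *bo_fallback,
                     const struct sync_type *timeline_emul,
                     const struct sync_type *out[4])
    {
       unsigned i = 0;

       out[i++] = syncobj;                      /* preferred path */
       if (!(syncobj->features & FEATURE_CPU_WAIT))
          out[i++] = bo_fallback;               /* GEM-wait fallback */
       if (!(syncobj->features & FEATURE_TIMELINE))
          out[i++] = timeline_emul;             /* userspace timeline */
       out[i] = NULL;                           /* terminator */
       assert(i < 4);
    }
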
918 device->vk.pipeline_cache_import_ops = anv_cache_import_ops;
920 device->always_use_bindless =
923 device->use_call_secondary =
924 device->use_softpin &&
930 device->has_a64_buffer_access = device->info.ver >= 8 &&
931 device->use_softpin;
935 device->has_bindless_images = device->info.ver >= 9;
942 device->has_bindless_samplers = device->info.ver >= 8;
944 device->has_implicit_ccs = device->info.has_aux_map ||
945 device->info.verx10 >= 125;
949 device->has_reg_timestamp = anv_gem_reg_read(fd, TIMESTAMP | I915_REG_READ_8B_WA,
952 device->always_flush_cache = INTEL_DEBUG(DEBUG_STALL) ||
955 device->has_mmap_offset =
958 device->has_userptr_probe =
961 device->compiler = brw_compiler_create(NULL, &device->info);
962 if (device->compiler == NULL) {
966 device->compiler->shader_debug_log = compiler_debug_log;
967 device->compiler->shader_perf_log = compiler_perf_log;
968 device->compiler->constant_buffer_0_is_relative =
969 device->info.ver < 8 || !device->has_context_isolation;
970 device->compiler->supports_shader_constants = true;
971 device->compiler->indirect_ubos_use_sampler = device->info.ver < 12;
973 isl_device_init(&device->isl_dev, &device->info);
975 result = anv_physical_device_init_uuids(device);
979 anv_physical_device_init_disk_cache(device);
984 /* prod the device with a GETPARAM call which will fail if
985 * we don't have permission to even render on this device
993 device->master_fd = master_fd;
995 device->engine_info = anv_gem_get_engine_info(fd);
996 anv_physical_device_init_queue_families(device);
998 device->local_fd = fd;
1000 anv_physical_device_init_perf(device, fd);
1002 get_device_extensions(device, &device->vk.supported_extensions);
1004 result = anv_init_wsi(device);
1008 anv_measure_device_init(device);
1010 anv_genX(&device->info, init_physical_device_state)(device);
1012 *device_out = device;
1017 device->has_master = true;
1018 device->master_major = major(st.st_rdev);
1019 device->master_minor = minor(st.st_rdev);
1021 device->has_master = false;
1022 device->master_major = 0;
1023 device->master_minor = 0;
1027 device->has_local = true;
1028 device->local_major = major(st.st_rdev);
1029 device->local_minor = minor(st.st_rdev);
1031 device->has_local = false;
1032 device->local_major = 0;
1033 device->local_minor = 0;
1039 ralloc_free(device->perf);
1040 free(device->engine_info);
1041 anv_physical_device_free_disk_cache(device);
1043 ralloc_free(device->compiler);
1045 vk_physical_device_finish(&device->vk);
1047 vk_free(&instance->vk.alloc, device);
1056 anv_physical_device_destroy(struct anv_physical_device *device)
1058 anv_finish_wsi(device);
1059 anv_measure_device_destroy(device);
1060 free(device->engine_info);
1061 anv_physical_device_free_disk_cache(device);
1062 ralloc_free(device->compiler);
1063 ralloc_free(device->perf);
1064 close(device->local_fd);
1065 if (device->master_fd >= 0)
1066 close(device->master_fd);
1067 vk_physical_device_finish(&device->vk);
1068 vk_free(&device->instance->vk.alloc, device);
1195 /* Incompatible DRM device, skip. */
1201 /* Error creating the physical device, report the error. */
2455 * "Note that the device divides a pixel into a 16x16 array of
2758 ANV_FROM_HANDLE(anv_physical_device, device, physicalDevice);
2760 if (!device->vk.supported_extensions.EXT_memory_budget)
2763 anv_update_meminfo(device, device->local_fd);
2766 for (size_t i = 0; i < device->memory.heap_count; i++) {
2767 if (device->memory.heaps[i].is_local_mem) {
2768 total_vram_heaps_size += device->memory.heaps[i].size;
2770 total_sys_heaps_size += device->memory.heaps[i].size;
2774 for (size_t i = 0; i < device->memory.heap_count; i++) {
2775 VkDeviceSize heap_size = device->memory.heaps[i].size;
2776 VkDeviceSize heap_used = device->memory.heaps[i].used;
2780 if (device->memory.heaps[i].is_local_mem) {
2782 if (device->vram_non_mappable.size > 0 && i == 0) {
2783 mem_available = device->vram_non_mappable.available;
2785 mem_available = device->vram_mappable.available;
2789 mem_available = device->sys.available;
2821 for (uint32_t i = device->memory.heap_count; i < VK_MAX_MEMORY_HEAPS; i++) {
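
In the VK_EXT_memory_budget path (lines 2766-2821), each heap's availability comes from the matching counter: for local-memory heaps, heap 0 maps to vram_non_mappable whenever that region exists and to vram_mappable otherwise, while system heaps read sys.available; remaining slots up to VK_MAX_MEMORY_HEAPS are zeroed. A flattened sketch of the per-heap selection:

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    /* Hypothetical, flattened view of the device's availability counters. */
    static uint64_t
    heap_available_sketch(bool heap_is_local_mem, size_t heap_index,
                          uint64_t vram_non_mappable_size,
                          uint64_t vram_non_mappable_avail,
                          uint64_t vram_mappable_avail,
                          uint64_t sys_avail)
    {
       if (heap_is_local_mem) {
          /* Heap 0 is the non-mappable VRAM heap whenever one exists. */
          if (vram_non_mappable_size > 0 && heap_index == 0)
             return vram_non_mappable_avail;
          return vram_mappable_avail;
       }
       return sys_avail;
    }
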
2848 VkDevice device,
2915 anv_device_init_border_colors(struct anv_device *device)
2917 if (device->info.platform == INTEL_PLATFORM_HSW) {
2927 device->border_colors =
2928 anv_state_pool_emit_data(&device->dynamic_state_pool,
2940 device->border_colors =
2941 anv_state_pool_emit_data(&device->dynamic_state_pool,
2947 anv_device_init_trivial_batch(struct anv_device *device)
2949 VkResult result = anv_device_alloc_bo(device, "trivial-batch", 4096,
2952 &device->trivial_batch_bo);
2957 .start = device->trivial_batch_bo->map,
2958 .next = device->trivial_batch_bo->map,
2959 .end = device->trivial_batch_bo->map + 4096,
2965 if (device->physical->memory.need_clflush)
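
anv_device_init_trivial_batch() allocates a 4 KiB BO and points an anv_batch's start/next/end cursors into its mapping (lines 2957-2959). A hypothetical miniature of that cursor layout:

    #include <stdint.h>

    /* Simplified stand-in for anv_batch's cursor-into-a-BO layout. */
    struct batch_sketch {
       void *start;   /* beginning of the mapped BO */
       void *next;    /* write cursor for new commands */
       void *end;     /* one past the last usable byte */
    };

    static void
    batch_init_sketch(struct batch_sketch *b, void *map, uint32_t size)
    {
       b->start = map;
       b->next = map;
       b->end = (char *)map + size;
    }
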
2994 struct anv_device *device = v_batch;
2999 if (get_bo_from_pool(&ret_bo, &device->dynamic_state_pool.block_pool, address))
3001 if (get_bo_from_pool(&ret_bo, &device->instruction_state_pool.block_pool, address))
3003 if (get_bo_from_pool(&ret_bo, &device->binding_table_pool.block_pool, address))
3005 if (get_bo_from_pool(&ret_bo, &device->surface_state_pool.block_pool, address))
3008 if (!device->cmd_buffer_being_decoded)
3013 u_vector_foreach(bo, &device->cmd_buffer_being_decoded->seen_bbos) {
3041 struct anv_device *device = (struct anv_device*)driver_ctx;
3042 assert(device->physical->supports_48bit_addresses &&
3043 device->physical->use_softpin);
3045 struct anv_state_pool *pool = &device->dynamic_state_pool;
3059 struct anv_device *device = (struct anv_device*)driver_ctx;
3060 struct anv_state_pool *pool = &device->dynamic_state_pool;
3080 struct anv_device *device;
3124 device = vk_zalloc2(&physical_device->instance->vk.alloc, pAllocator,
3125 sizeof(*device), 8,
3127 if (!device)
3138 result = vk_device_init(&device->vk, &physical_device->vk,
3150 intel_batch_decode_ctx_init(&device->decoder_ctx,
3154 decode_get_bo, NULL, device);
3156 device->decoder_ctx.dynamic_base = DYNAMIC_STATE_POOL_MIN_ADDRESS;
3157 device->decoder_ctx.surface_base = SURFACE_STATE_POOL_MIN_ADDRESS;
3158 device->decoder_ctx.instruction_base =
3162 device->physical = physical_device;
3165 device->fd = open(physical_device->path, O_RDWR | O_CLOEXEC);
3166 if (device->fd == -1) {
3167 result = vk_error(device, VK_ERROR_INITIALIZATION_FAILED);
3171 device->vk.check_status = anv_device_check_status;
3172 device->vk.create_sync_for_memory = anv_create_sync_for_memory;
3173 vk_device_set_drm_fd(&device->vk, device->fd);
3179 if (device->physical->engine_info) {
3196 device->context_id =
3197 intel_gem_create_context_engines(device->fd,
3202 device->context_id = anv_gem_create_context(device);
3204 if (device->context_id == -1) {
3205 result = vk_error(device, VK_ERROR_INITIALIZATION_FAILED);
3215 anv_gem_set_context_param(device->fd, device->context_id,
3218 device->queues =
3219 vk_zalloc(&device->vk.alloc, num_queues * sizeof(*device->queues), 8,
3221 if (device->queues == NULL) {
3222 result = vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
3226 device->queue_count = 0;
3236 uint32_t exec_flags = device->physical->engine_info ?
3237 device->queue_count : I915_EXEC_RENDER;
3239 result = anv_queue_init(device, &device->queues[device->queue_count],
3244 device->queue_count++;
3249 if (pthread_mutex_init(&device->vma_mutex, NULL) != 0) {
3250 result = vk_error(device, VK_ERROR_INITIALIZATION_FAILED);
3255 util_vma_heap_init(&device->vma_lo,
3258 util_vma_heap_init(&device->vma_cva, CLIENT_VISIBLE_HEAP_MIN_ADDRESS,
3265 util_vma_heap_init(&device->vma_hi, HIGH_HEAP_MIN_ADDRESS,
3270 list_inithead(&device->memory_objects);
3278 int err = anv_gem_set_context_param(device->fd, device->context_id,
3282 result = vk_error(device, VK_ERROR_NOT_PERMITTED_KHR);
3287 device->info = physical_device->info;
3288 device->isl_dev = physical_device->isl_dev;
3295 device->can_chain_batches = device->info.ver >= 8;
3297 device->robust_buffer_access = robust_buffer_access;
3299 if (pthread_mutex_init(&device->mutex, NULL) != 0) {
3300 result = vk_error(device, VK_ERROR_INITIALIZATION_FAILED);
3306 result = vk_error(device, VK_ERROR_INITIALIZATION_FAILED);
3311 result = vk_error(device, VK_ERROR_INITIALIZATION_FAILED);
3314 if (pthread_cond_init(&device->queue_submit, &condattr) != 0) {
3316 result = vk_error(device, VK_ERROR_INITIALIZATION_FAILED);
3321 result = anv_bo_cache_init(&device->bo_cache, device);
3325 anv_bo_pool_init(&device->batch_bo_pool, device, "batch");
3331 result = anv_state_pool_init(&device->general_state_pool, device,
3337 result = anv_state_pool_init(&device->dynamic_state_pool, device,
3343 if (device->info.ver >= 8) {
3350 anv_state_reserved_pool_init(&device->custom_border_colors,
3351 &device->dynamic_state_pool,
3356 result = anv_state_pool_init(&device->instruction_state_pool, device,
3362 result = anv_state_pool_init(&device->surface_state_pool, device,
3368 if (device->info.verx10 >= 125) {
3372 result = anv_state_pool_init(&device->binding_table_pool, device,
3380 result = anv_state_pool_init(&device->binding_table_pool, device,
3389 if (device->info.has_aux_map) {
3390 device->aux_map_ctx = intel_aux_map_init(device, &aux_map_allocator,
3392 if (!device->aux_map_ctx)
3396 result = anv_device_alloc_bo(device, "workaround", 4096,
3401 &device->workaround_bo);
3405 device->workaround_address = (struct anv_address) {
3406 .bo = device->workaround_bo,
3408 intel_debug_write_identifiers(device->workaround_bo->map,
3409 device->workaround_bo->size,
3413 device->debug_frame_desc =
3414 intel_debug_get_identifier_block(device->workaround_bo->map,
3415 device->workaround_bo->size,
3418 if (device->vk.enabled_extensions.KHR_ray_query) {
3420 align_u32(brw_rt_ray_queries_hw_stacks_size(&device->info), 4096);
3422 result = anv_device_alloc_bo(device, "ray queries",
3426 &device->ray_query_bo);
3431 result = anv_device_init_trivial_batch(device);
3435 if (device->info.ver >= 12 &&
3436 device->vk.enabled_extensions.KHR_fragment_shading_rate) {
3439 if (device->info.has_coarse_pixel_primitive_and_cb)
3447 device->cps_states =
3448 anv_state_pool_alloc(&device->dynamic_state_pool,
3449 n_cps_states * CPS_STATE_length(&device->info) * 4,
3451 if (device->cps_states.map == NULL)
3454 anv_genX(&device->info, init_cps_device_state)(device);
3461 device->null_surface_state =
3462 anv_state_pool_alloc(&device->surface_state_pool,
3463 device->isl_dev.ss.size,
3464 device->isl_dev.ss.align);
3465 isl_null_fill_state(&device->isl_dev, device->null_surface_state.map,
3467 assert(device->null_surface_state.offset == 0);
3469 anv_scratch_pool_init(device, &device->scratch_pool);
3472 memset(device->rt_scratch_bos, 0, sizeof(device->rt_scratch_bos));
3474 result = anv_genX(&device->info, init_device_state)(device);
3479 device->default_pipeline_cache =
3480 vk_pipeline_cache_create(&device->vk, &pcc_info, NULL);
3481 if (!device->default_pipeline_cache) {
3482 result = vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
3492 device->internal_cache =
3493 vk_pipeline_cache_create(&device->vk, &pcc_info, NULL);
3494 if (device->internal_cache == NULL) {
3495 result = vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
3499 result = anv_device_init_rt_shaders(device);
3501 result = vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
3505 anv_device_init_blorp(device);
3507 anv_device_init_border_colors(device);
3509 anv_device_perf_init(device);
3511 anv_device_utrace_init(device);
3513 *pDevice = anv_device_to_handle(device);
3518 vk_pipeline_cache_destroy(device->internal_cache, NULL);
3520 vk_pipeline_cache_destroy(device->default_pipeline_cache, NULL);
3522 anv_scratch_pool_finish(device, &device->scratch_pool);
3524 anv_device_release_bo(device, device->trivial_batch_bo);
3526 if (device->ray_query_bo)
3527 anv_device_release_bo(device, device->ray_query_bo);
3529 anv_device_release_bo(device, device->workaround_bo);
3531 if (device->info.has_aux_map) {
3532 intel_aux_map_finish(device->aux_map_ctx);
3533 device->aux_map_ctx = NULL;
3537 anv_state_pool_finish(&device->binding_table_pool);
3539 anv_state_pool_finish(&device->surface_state_pool);
3541 anv_state_pool_finish(&device->instruction_state_pool);
3543 if (device->info.ver >= 8)
3544 anv_state_reserved_pool_finish(&device->custom_border_colors);
3545 anv_state_pool_finish(&device->dynamic_state_pool);
3547 anv_state_pool_finish(&device->general_state_pool);
3549 anv_bo_pool_finish(&device->batch_bo_pool);
3550 anv_bo_cache_finish(&device->bo_cache);
3552 pthread_cond_destroy(&device->queue_submit);
3554 pthread_mutex_destroy(&device->mutex);
3557 util_vma_heap_finish(&device->vma_hi);
3558 util_vma_heap_finish(&device->vma_cva);
3559 util_vma_heap_finish(&device->vma_lo);
3562 for (uint32_t i = 0; i < device->queue_count; i++)
3563 anv_queue_finish(&device->queues[i]);
3564 vk_free(&device->vk.alloc, device->queues);
3566 anv_gem_destroy_context(device, device->context_id);
3568 close(device->fd);
3570 vk_device_finish(&device->vk);
3572 vk_free(&device->vk.alloc, device);
3581 ANV_FROM_HANDLE(anv_device, device, _device);
3583 if (!device)
3586 anv_device_utrace_finish(device);
3588 anv_device_finish_blorp(device);
3590 anv_device_finish_rt_shaders(device);
3592 vk_pipeline_cache_destroy(device->internal_cache, NULL);
3593 vk_pipeline_cache_destroy(device->default_pipeline_cache, NULL);
3599 if (device->info.ver >= 8)
3600 anv_state_reserved_pool_finish(&device->custom_border_colors);
3601 anv_state_pool_free(&device->dynamic_state_pool, device->border_colors);
3602 anv_state_pool_free(&device->dynamic_state_pool, device->slice_hash);
3603 anv_state_pool_free(&device->dynamic_state_pool, device->cps_states);
3606 for (unsigned i = 0; i < ARRAY_SIZE(device->rt_scratch_bos); i++) {
3607 if (device->rt_scratch_bos[i] != NULL)
3608 anv_device_release_bo(device, device->rt_scratch_bos[i]);
3611 anv_scratch_pool_finish(device, &device->scratch_pool);
3613 if (device->vk.enabled_extensions.KHR_ray_query) {
3614 for (unsigned i = 0; i < ARRAY_SIZE(device->ray_query_shadow_bos); i++) {
3615 if (device->ray_query_shadow_bos[i] != NULL)
3616 anv_device_release_bo(device, device->ray_query_shadow_bos[i]);
3618 anv_device_release_bo(device, device->ray_query_bo);
3620 anv_device_release_bo(device, device->workaround_bo);
3621 anv_device_release_bo(device, device->trivial_batch_bo);
3623 if (device->info.has_aux_map) {
3624 intel_aux_map_finish(device->aux_map_ctx);
3625 device->aux_map_ctx = NULL;
3628 if (!anv_use_relocations(device->physical))
3629 anv_state_pool_finish(&device->binding_table_pool);
3630 anv_state_pool_finish(&device->surface_state_pool);
3631 anv_state_pool_finish(&device->instruction_state_pool);
3632 anv_state_pool_finish(&device->dynamic_state_pool);
3633 anv_state_pool_finish(&device->general_state_pool);
3635 anv_bo_pool_finish(&device->batch_bo_pool);
3637 anv_bo_cache_finish(&device->bo_cache);
3639 if (!anv_use_relocations(device->physical)) {
3640 util_vma_heap_finish(&device->vma_hi);
3641 util_vma_heap_finish(&device->vma_cva);
3642 util_vma_heap_finish(&device->vma_lo);
3645 pthread_cond_destroy(&device->queue_submit);
3646 pthread_mutex_destroy(&device->mutex);
3648 for (uint32_t i = 0; i < device->queue_count; i++)
3649 anv_queue_finish(&device->queues[i]);
3650 vk_free(&device->vk.alloc, device->queues);
3652 anv_gem_destroy_context(device, device->context_id);
3655 intel_batch_decode_ctx_finish(&device->decoder_ctx);
3657 close(device->fd);
3659 vk_device_finish(&device->vk);
3660 vk_free(&device->vk.alloc, device);
3679 struct anv_device *device = container_of(vk_device, struct anv_device, vk);
3682 int ret = anv_gem_context_get_reset_stats(device->fd, device->context_id,
3686 return vk_device_set_lost(&device->vk, "get_reset_stats failed: %m");
3690 return vk_device_set_lost(&device->vk, "GPU hung on one of our command buffers");
3692 return vk_device_set_lost(&device->vk, "GPU hung with commands in-flight");
3699 anv_device_wait(struct anv_device *device, struct anv_bo *bo,
3702 int ret = anv_gem_wait(device, bo->gem_handle, &timeout);
3707 return vk_device_set_lost(&device->vk, "gem wait failed: %m");
3714 anv_vma_alloc(struct anv_device *device,
3719 pthread_mutex_lock(&device->vma_mutex);
3725 if (util_vma_heap_alloc_addr(&device->vma_cva,
3730 addr = util_vma_heap_alloc(&device->vma_cva, size, align);
3739 addr = util_vma_heap_alloc(&device->vma_hi, size, align);
3742 addr = util_vma_heap_alloc(&device->vma_lo, size, align);
3745 pthread_mutex_unlock(&device->vma_mutex);
3752 anv_vma_free(struct anv_device *device,
3757 pthread_mutex_lock(&device->vma_mutex);
3761 util_vma_heap_free(&device->vma_lo, addr_48b, size);
3764 util_vma_heap_free(&device->vma_cva, addr_48b, size);
3767 util_vma_heap_free(&device->vma_hi, addr_48b, size);
3770 pthread_mutex_unlock(&device->vma_mutex);
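
anv_vma_alloc() and anv_vma_free() serialize every heap operation behind vma_mutex and pick a heap by address class: client-visible addresses come from vma_cva (via util_vma_heap_alloc_addr() when a fixed address is requested), and other allocations try vma_hi before falling back to vma_lo. A condensed sketch of the locked alloc path, assuming Mesa's util/vma.h API:

    #include <pthread.h>
    #include <stdint.h>
    #include "util/vma.h"   /* Mesa's util_vma_heap */

    static uint64_t
    vma_alloc_sketch(pthread_mutex_t *mutex,
                     struct util_vma_heap *vma_hi,
                     struct util_vma_heap *vma_lo,
                     uint64_t size, uint64_t align)
    {
       uint64_t addr;

       pthread_mutex_lock(mutex);
       addr = util_vma_heap_alloc(vma_hi, size, align);
       if (addr == 0)   /* 0 signals an exhausted heap */
          addr = util_vma_heap_alloc(vma_lo, size, align);
       pthread_mutex_unlock(mutex);

       return addr;
    }
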
3779 ANV_FROM_HANDLE(anv_device, device, _device);
3780 struct anv_physical_device *pdevice = device->physical;
3793 return vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
3804 return vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
3806 mem = vk_object_alloc(&device->vk, pAllocator, sizeof(*mem),
3809 return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
3874 if (device->physical->has_implicit_ccs && device->info.has_aux_map)
3929 result = anv_device_import_bo(device, fd_info->fd, alloc_flags,
3943 result = vk_errorf(device, VK_ERROR_INVALID_EXTERNAL_HANDLE,
3948 anv_device_release_bo(device, mem->bo);
3968 result = vk_error(device, VK_ERROR_INVALID_EXTERNAL_HANDLE);
3975 result = anv_device_import_bo_from_host_ptr(device,
3988 /* Set ALLOC_LOCAL_MEM flag if heap has device local bit set and requested
3996 result = anv_device_alloc_bo(device, "user", pAllocateInfo->allocationSize,
4009 result = anv_device_set_bo_tiling(device, mem->bo,
4013 anv_device_release_bo(device, mem->bo);
4023 anv_device_release_bo(device, mem->bo);
4024 result = vk_errorf(device, VK_ERROR_OUT_OF_DEVICE_MEMORY,
4029 pthread_mutex_lock(&device->mutex);
4030 list_addtail(&mem->link, &device->memory_objects);
4031 pthread_mutex_unlock(&device->mutex);
4038 vk_object_free(&device->vk, pAllocator, mem);
4065 ANV_FROM_HANDLE(anv_device, device, _device);
4071 (1 << device->physical->memory.type_count) - 1;
4082 return vk_error(device, VK_ERROR_INVALID_EXTERNAL_HANDLE);
4092 ANV_FROM_HANDLE(anv_device, device, _device);
4101 (1ull << device->physical->memory.type_count) - 1;
4115 ANV_FROM_HANDLE(anv_device, device, _device);
4121 pthread_mutex_lock(&device->mutex);
4123 pthread_mutex_unlock(&device->mutex);
4128 p_atomic_add(&device->physical->memory.heaps[mem->type->heapIndex].used,
4131 anv_device_release_bo(device, mem->bo);
4138 vk_object_free(&device->vk, pAllocator, mem);
4149 ANV_FROM_HANDLE(anv_device, device, _device);
4176 return vk_errorf(device, VK_ERROR_MEMORY_MAP_FAILED,
4186 return vk_errorf(device, VK_ERROR_MEMORY_MAP_FAILED,
4192 if (!device->info.has_llc &&
4198 if (!device->physical->has_mmap_offset)
4209 VkResult result = anv_device_map_bo(device, mem->bo, map_offset,
4226 ANV_FROM_HANDLE(anv_device, device, _device);
4232 anv_device_unmap_bo(device, mem->bo, mem->map, mem->map_size);
4244 ANV_FROM_HANDLE(anv_device, device, _device);
4246 if (!device->physical->memory.need_clflush)
4274 ANV_FROM_HANDLE(anv_device, device, _device);
4276 if (!device->physical->memory.need_clflush)
4300 VkDevice device,
4328 VkDevice device,
4345 if (vk_device_is_lost(&queue->device->vk))
4359 ANV_FROM_HANDLE(anv_device, device, _device);
4364 event = vk_object_alloc(&device->vk, pAllocator, sizeof(*event),
4367 return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
4369 event->state = anv_state_pool_alloc(&device->dynamic_state_pool,
4383 ANV_FROM_HANDLE(anv_device, device, _device);
4389 anv_state_pool_free(&device->dynamic_state_pool, event->state);
4391 vk_object_free(&device->vk, pAllocator, event);
4398 ANV_FROM_HANDLE(anv_device, device, _device);
4401 if (vk_device_is_lost(&device->vk))
4432 anv_get_buffer_memory_requirements(struct anv_device *device,
4442 * structure for the physical device is supported.
4444 uint32_t memory_types = (1ull << device->physical->memory.type_count) - 1;
4460 if (device->robust_buffer_access &&
4488 ANV_FROM_HANDLE(anv_device, device, _device);
4491 anv_get_buffer_memory_requirements(device,
4502 ANV_FROM_HANDLE(anv_device, device, _device);
4504 anv_get_buffer_memory_requirements(device,
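
anv_get_buffer_memory_requirements() permits every advertised memory type for ordinary buffers, which is the bitmask on line 4444: the low type_count bits all set. A one-line sketch, noting why the shift is done in 64 bits:

    #include <stdint.h>

    static uint32_t
    all_memory_type_bits(uint32_t type_count)
    {
       /* Low type_count bits set; 1ull keeps the shift defined even
        * when all 32 possible types are in use. */
       return (uint32_t)((1ull << type_count) - 1);
    }
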
4516 ANV_FROM_HANDLE(anv_device, device, _device);
4524 if (pCreateInfo->size > device->physical->gtt_size)
4525 return vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
4527 buffer = vk_buffer_create(&device->vk, pCreateInfo,
4530 return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
4544 ANV_FROM_HANDLE(anv_device, device, _device);
4550 vk_buffer_destroy(&device->vk, pAllocator, &buffer->vk);
4554 VkDevice device,
4566 VkDevice device,
4573 VkDevice device,
4585 anv_fill_buffer_surface_state(struct anv_device *device, struct anv_state state,
4592 isl_buffer_fill_state(&device->isl_dev, state.map,
4594 .mocs = isl_mocs(&device->isl_dev, usage,
4607 ANV_FROM_HANDLE(anv_device, device, _device);
4614 anv_state_pool_free(&device->dynamic_state_pool,
4619 anv_state_reserved_pool_free(&device->custom_border_colors,
4623 vk_object_free(&device->vk, pAllocator, sampler);
4675 ANV_FROM_HANDLE(anv_device, device, _device);
4676 uint64_t timestamp_frequency = device->info.timestamp_frequency;
4691 ret = anv_gem_reg_read(device->fd, TIMESTAMP | I915_REG_READ_8B_WA,
4695 return vk_device_set_lost(&device->vk, "Failed to read the "