
Searched refs:vm (Results 1 - 25 of 731) sorted by relevance


/kernel/linux/linux-6.6/drivers/virtio/
virtio_mem.c
278 static void virtio_mem_retry(struct virtio_mem *vm);
279 static int virtio_mem_create_resource(struct virtio_mem *vm);
280 static void virtio_mem_delete_resource(struct virtio_mem *vm);
286 static int register_virtio_mem_device(struct virtio_mem *vm) in register_virtio_mem_device() argument
295 list_add_rcu(&vm->next, &virtio_mem_devices); in register_virtio_mem_device()
305 static void unregister_virtio_mem_device(struct virtio_mem *vm) in unregister_virtio_mem_device() argument
309 list_del_rcu(&vm->next); in unregister_virtio_mem_device()
336 static unsigned long virtio_mem_phys_to_bb_id(struct virtio_mem *vm, in virtio_mem_phys_to_bb_id() argument
339 return addr / vm->bbm.bb_size; in virtio_mem_phys_to_bb_id()
345 static uint64_t virtio_mem_bb_id_to_phys(struct virtio_mem *vm, in virtio_mem_bb_id_to_phys() argument
354 virtio_mem_phys_to_sb_id(struct virtio_mem *vm, unsigned long addr) virtio_mem_phys_to_sb_id() argument
366 virtio_mem_bbm_set_bb_state(struct virtio_mem *vm, unsigned long bb_id, enum virtio_mem_bbm_bb_state state) virtio_mem_bbm_set_bb_state() argument
384 virtio_mem_bbm_get_bb_state(struct virtio_mem *vm, unsigned long bb_id) virtio_mem_bbm_get_bb_state() argument
393 virtio_mem_bbm_bb_states_prepare_next_bb(struct virtio_mem *vm) virtio_mem_bbm_bb_states_prepare_next_bb() argument
433 virtio_mem_sbm_set_mb_state(struct virtio_mem *vm, unsigned long mb_id, uint8_t state) virtio_mem_sbm_set_mb_state() argument
450 virtio_mem_sbm_get_mb_state(struct virtio_mem *vm, unsigned long mb_id) virtio_mem_sbm_get_mb_state() argument
461 virtio_mem_sbm_mb_states_prepare_next_mb(struct virtio_mem *vm) virtio_mem_sbm_mb_states_prepare_next_mb() argument
500 virtio_mem_sbm_sb_state_bit_nr(struct virtio_mem *vm, unsigned long mb_id, int sb_id) virtio_mem_sbm_sb_state_bit_nr() argument
511 virtio_mem_sbm_set_sb_plugged(struct virtio_mem *vm, unsigned long mb_id, int sb_id, int count) virtio_mem_sbm_set_sb_plugged() argument
525 virtio_mem_sbm_set_sb_unplugged(struct virtio_mem *vm, unsigned long mb_id, int sb_id, int count) virtio_mem_sbm_set_sb_unplugged() argument
537 virtio_mem_sbm_test_sb_plugged(struct virtio_mem *vm, unsigned long mb_id, int sb_id, int count) virtio_mem_sbm_test_sb_plugged() argument
554 virtio_mem_sbm_test_sb_unplugged(struct virtio_mem *vm, unsigned long mb_id, int sb_id, int count) virtio_mem_sbm_test_sb_unplugged() argument
569 virtio_mem_sbm_first_unplugged_sb(struct virtio_mem *vm, unsigned long mb_id) virtio_mem_sbm_first_unplugged_sb() argument
581 virtio_mem_sbm_sb_states_prepare_next_mb(struct virtio_mem *vm) virtio_mem_sbm_sb_states_prepare_next_mb() argument
613 virtio_mem_could_add_memory(struct virtio_mem *vm, uint64_t size) virtio_mem_could_add_memory() argument
629 virtio_mem_add_memory(struct virtio_mem *vm, uint64_t addr, uint64_t size) virtio_mem_add_memory() argument
665 virtio_mem_sbm_add_mb(struct virtio_mem *vm, unsigned long mb_id) virtio_mem_sbm_add_mb() argument
676 virtio_mem_bbm_add_bb(struct virtio_mem *vm, unsigned long bb_id) virtio_mem_bbm_add_bb() argument
693 virtio_mem_remove_memory(struct virtio_mem *vm, uint64_t addr, uint64_t size) virtio_mem_remove_memory() argument
717 virtio_mem_sbm_remove_mb(struct virtio_mem *vm, unsigned long mb_id) virtio_mem_sbm_remove_mb() argument
733 virtio_mem_offline_and_remove_memory(struct virtio_mem *vm, uint64_t addr, uint64_t size) virtio_mem_offline_and_remove_memory() argument
766 virtio_mem_sbm_offline_and_remove_mb(struct virtio_mem *vm, unsigned long mb_id) virtio_mem_sbm_offline_and_remove_mb() argument
781 virtio_mem_sbm_try_remove_unplugged_mb(struct virtio_mem *vm, unsigned long mb_id) virtio_mem_sbm_try_remove_unplugged_mb() argument
807 virtio_mem_bbm_offline_and_remove_bb(struct virtio_mem *vm, unsigned long bb_id) virtio_mem_bbm_offline_and_remove_bb() argument
819 virtio_mem_retry(struct virtio_mem *vm) virtio_mem_retry() argument
829 virtio_mem_translate_node_id(struct virtio_mem *vm, uint16_t node_id) virtio_mem_translate_node_id() argument
844 virtio_mem_overlaps_range(struct virtio_mem *vm, uint64_t start, uint64_t size) virtio_mem_overlaps_range() argument
854 virtio_mem_contains_range(struct virtio_mem *vm, uint64_t start, uint64_t size) virtio_mem_contains_range() argument
860 virtio_mem_sbm_notify_going_online(struct virtio_mem *vm, unsigned long mb_id) virtio_mem_sbm_notify_going_online() argument
875 virtio_mem_sbm_notify_offline(struct virtio_mem *vm, unsigned long mb_id) virtio_mem_sbm_notify_offline() argument
895 virtio_mem_sbm_notify_online(struct virtio_mem *vm, unsigned long mb_id, unsigned long start_pfn) virtio_mem_sbm_notify_online() argument
920 virtio_mem_sbm_notify_going_offline(struct virtio_mem *vm, unsigned long mb_id) virtio_mem_sbm_notify_going_offline() argument
936 virtio_mem_sbm_notify_cancel_offline(struct virtio_mem *vm, unsigned long mb_id) virtio_mem_sbm_notify_cancel_offline() argument
952 virtio_mem_bbm_notify_going_offline(struct virtio_mem *vm, unsigned long bb_id, unsigned long pfn, unsigned long nr_pages) virtio_mem_bbm_notify_going_offline() argument
967 virtio_mem_bbm_notify_cancel_offline(struct virtio_mem *vm, unsigned long bb_id, unsigned long pfn, unsigned long nr_pages) virtio_mem_bbm_notify_cancel_offline() argument
986 struct virtio_mem *vm = container_of(nb, struct virtio_mem, virtio_mem_memory_notifier_cb() local
1192 virtio_mem_fake_offline(struct virtio_mem *vm, unsigned long pfn, unsigned long nr_pages) virtio_mem_fake_offline() argument
1274 virtio_mem_online_page(struct virtio_mem *vm, struct page *page, unsigned int order) virtio_mem_online_page() argument
1336 struct virtio_mem *vm; virtio_mem_online_page_cb() local
1364 virtio_mem_send_request(struct virtio_mem *vm, const struct virtio_mem_req *req) virtio_mem_send_request() argument
1394 virtio_mem_send_plug_request(struct virtio_mem *vm, uint64_t addr, uint64_t size) virtio_mem_send_plug_request() argument
1432 virtio_mem_send_unplug_request(struct virtio_mem *vm, uint64_t addr, uint64_t size) virtio_mem_send_unplug_request() argument
1467 virtio_mem_send_unplug_all_request(struct virtio_mem *vm) virtio_mem_send_unplug_all_request() argument
1498 virtio_mem_sbm_plug_sb(struct virtio_mem *vm, unsigned long mb_id, int sb_id, int count) virtio_mem_sbm_plug_sb() argument
1516 virtio_mem_sbm_unplug_sb(struct virtio_mem *vm, unsigned long mb_id, int sb_id, int count) virtio_mem_sbm_unplug_sb() argument
1535 virtio_mem_bbm_unplug_bb(struct virtio_mem *vm, unsigned long bb_id) virtio_mem_bbm_unplug_bb() argument
1548 virtio_mem_bbm_plug_bb(struct virtio_mem *vm, unsigned long bb_id) virtio_mem_bbm_plug_bb() argument
1565 virtio_mem_sbm_unplug_any_sb_raw(struct virtio_mem *vm, unsigned long mb_id, uint64_t *nb_sb) virtio_mem_sbm_unplug_any_sb_raw() argument
1604 virtio_mem_sbm_unplug_mb(struct virtio_mem *vm, unsigned long mb_id) virtio_mem_sbm_unplug_mb() argument
1614 virtio_mem_sbm_prepare_next_mb(struct virtio_mem *vm, unsigned long *mb_id) virtio_mem_sbm_prepare_next_mb() argument
1643 virtio_mem_sbm_plug_and_add_mb(struct virtio_mem *vm, unsigned long mb_id, uint64_t *nb_sb) virtio_mem_sbm_plug_and_add_mb() argument
1694 virtio_mem_sbm_plug_any_sb(struct virtio_mem *vm, unsigned long mb_id, uint64_t *nb_sb) virtio_mem_sbm_plug_any_sb() argument
1735 virtio_mem_sbm_plug_request(struct virtio_mem *vm, uint64_t diff) virtio_mem_sbm_plug_request() argument
1803 virtio_mem_bbm_plug_and_add_bb(struct virtio_mem *vm, unsigned long bb_id) virtio_mem_bbm_plug_and_add_bb() argument
1834 virtio_mem_bbm_prepare_next_bb(struct virtio_mem *vm, unsigned long *bb_id) virtio_mem_bbm_prepare_next_bb() argument
1853 virtio_mem_bbm_plug_request(struct virtio_mem *vm, uint64_t diff) virtio_mem_bbm_plug_request() argument
1897 virtio_mem_plug_request(struct virtio_mem *vm, uint64_t diff) virtio_mem_plug_request() argument
1913 virtio_mem_sbm_unplug_any_sb_offline(struct virtio_mem *vm, unsigned long mb_id, uint64_t *nb_sb) virtio_mem_sbm_unplug_any_sb_offline() argument
1951 virtio_mem_sbm_unplug_sb_online(struct virtio_mem *vm, unsigned long mb_id, int sb_id, int count) virtio_mem_sbm_unplug_sb_online() argument
1999 virtio_mem_sbm_unplug_any_sb_online(struct virtio_mem *vm, unsigned long mb_id, uint64_t *nb_sb) virtio_mem_sbm_unplug_any_sb_online() argument
2054 virtio_mem_sbm_unplug_any_sb(struct virtio_mem *vm, unsigned long mb_id, uint64_t *nb_sb) virtio_mem_sbm_unplug_any_sb() argument
2073 virtio_mem_sbm_unplug_request(struct virtio_mem *vm, uint64_t diff) virtio_mem_sbm_unplug_request() argument
2133 virtio_mem_bbm_offline_remove_and_unplug_bb(struct virtio_mem *vm, unsigned long bb_id) virtio_mem_bbm_offline_remove_and_unplug_bb() argument
2199 virtio_mem_bbm_bb_is_offline(struct virtio_mem *vm, unsigned long bb_id) virtio_mem_bbm_bb_is_offline() argument
2218 virtio_mem_bbm_bb_is_movable(struct virtio_mem *vm, unsigned long bb_id) virtio_mem_bbm_bb_is_movable() argument
2238 virtio_mem_bbm_unplug_request(struct virtio_mem *vm, uint64_t diff) virtio_mem_bbm_unplug_request() argument
2281 virtio_mem_unplug_request(struct virtio_mem *vm, uint64_t diff) virtio_mem_unplug_request() argument
2293 virtio_mem_cleanup_pending_mb(struct virtio_mem *vm) virtio_mem_cleanup_pending_mb() argument
2345 virtio_mem_refresh_config(struct virtio_mem *vm) virtio_mem_refresh_config() argument
2392 struct virtio_mem *vm = container_of(work, struct virtio_mem, wq); virtio_mem_run_wq() local
2481 struct virtio_mem *vm = container_of(timer, struct virtio_mem, virtio_mem_timer_expired() local
2492 struct virtio_mem *vm = vq->vdev->priv; virtio_mem_handle_response() local
2497 virtio_mem_init_vq(struct virtio_mem *vm) virtio_mem_init_vq() argument
2510 virtio_mem_init_hotplug(struct virtio_mem *vm) virtio_mem_init_hotplug() argument
2633 virtio_mem_send_state_request(struct virtio_mem *vm, uint64_t addr, uint64_t size) virtio_mem_send_state_request() argument
2664 struct virtio_mem *vm = container_of(cb, struct virtio_mem, virtio_mem_vmcore_pfn_is_ram() local
2699 virtio_mem_init_kdump(struct virtio_mem *vm) virtio_mem_init_kdump() argument
2712 virtio_mem_init(struct virtio_mem *vm) virtio_mem_init() argument
2753 virtio_mem_create_resource(struct virtio_mem *vm) virtio_mem_create_resource() argument
2781 virtio_mem_delete_resource(struct virtio_mem *vm) virtio_mem_delete_resource() argument
2800 virtio_mem_has_memory_added(struct virtio_mem *vm) virtio_mem_has_memory_added() argument
2811 struct virtio_mem *vm; virtio_mem_probe() local
2860 virtio_mem_deinit_hotplug(struct virtio_mem *vm) virtio_mem_deinit_hotplug() argument
2925 virtio_mem_deinit_kdump(struct virtio_mem *vm) virtio_mem_deinit_kdump() argument
2934 struct virtio_mem *vm = vdev->priv; virtio_mem_remove() local
2951 struct virtio_mem *vm = vdev->priv; virtio_mem_config_changed() local
[all...]
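The hottest matches above are plain address arithmetic: virtio_mem_phys_to_bb_id() divides a physical address by the device's big-block size, and virtio_mem_bb_id_to_phys() is its inverse. A minimal userspace sketch of that conversion, with a made-up BB_SIZE standing in for the device-negotiated vm->bbm.bb_size:

```c
#include <stdint.h>
#include <stdio.h>

/* Hypothetical big-block size; the real driver reads it from the device. */
#define BB_SIZE (256ULL * 1024 * 1024)

/* Mirrors virtio_mem_phys_to_bb_id(): which big block covers this address? */
static unsigned long phys_to_bb_id(uint64_t addr)
{
	return addr / BB_SIZE;
}

/* Natural inverse (the body of virtio_mem_bb_id_to_phys() is truncated above). */
static uint64_t bb_id_to_phys(unsigned long bb_id)
{
	return (uint64_t)bb_id * BB_SIZE;
}

int main(void)
{
	uint64_t addr = 0x140000000ULL; /* 5 GiB */
	unsigned long bb_id = phys_to_bb_id(addr);

	printf("addr 0x%llx -> bb_id %lu (block starts at 0x%llx)\n",
	       (unsigned long long)addr, bb_id,
	       (unsigned long long)bb_id_to_phys(bb_id));
	return 0;
}
```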
/kernel/linux/linux-5.10/drivers/virtio/
virtio_mem.c
168 static int register_virtio_mem_device(struct virtio_mem *vm) in register_virtio_mem_device() argument
177 list_add_rcu(&vm->next, &virtio_mem_devices); in register_virtio_mem_device()
187 static void unregister_virtio_mem_device(struct virtio_mem *vm) in unregister_virtio_mem_device() argument
191 list_del_rcu(&vm->next); in unregister_virtio_mem_device()
218 static unsigned long virtio_mem_phys_to_sb_id(struct virtio_mem *vm, in virtio_mem_phys_to_sb_id() argument
224 return (addr - mb_addr) / vm->subblock_size; in virtio_mem_phys_to_sb_id()
230 static void virtio_mem_mb_set_state(struct virtio_mem *vm, unsigned long mb_id, in virtio_mem_mb_set_state() argument
233 const unsigned long idx = mb_id - vm->first_mb_id; in virtio_mem_mb_set_state()
236 old_state = vm->mb_state[idx]; in virtio_mem_mb_set_state()
237 vm in virtio_mem_mb_set_state()
247 virtio_mem_mb_get_state(struct virtio_mem *vm, unsigned long mb_id) virtio_mem_mb_get_state() argument
258 virtio_mem_mb_state_prepare_next_mb(struct virtio_mem *vm) virtio_mem_mb_state_prepare_next_mb() argument
300 virtio_mem_mb_set_sb_plugged(struct virtio_mem *vm, unsigned long mb_id, int sb_id, int count) virtio_mem_mb_set_sb_plugged() argument
314 virtio_mem_mb_set_sb_unplugged(struct virtio_mem *vm, unsigned long mb_id, int sb_id, int count) virtio_mem_mb_set_sb_unplugged() argument
326 virtio_mem_mb_test_sb_plugged(struct virtio_mem *vm, unsigned long mb_id, int sb_id, int count) virtio_mem_mb_test_sb_plugged() argument
343 virtio_mem_mb_test_sb_unplugged(struct virtio_mem *vm, unsigned long mb_id, int sb_id, int count) virtio_mem_mb_test_sb_unplugged() argument
357 virtio_mem_mb_first_unplugged_sb(struct virtio_mem *vm, unsigned long mb_id) virtio_mem_mb_first_unplugged_sb() argument
369 virtio_mem_sb_bitmap_prepare_next_mb(struct virtio_mem *vm) virtio_mem_sb_bitmap_prepare_next_mb() argument
406 virtio_mem_mb_add(struct virtio_mem *vm, unsigned long mb_id) virtio_mem_mb_add() argument
440 virtio_mem_mb_remove(struct virtio_mem *vm, unsigned long mb_id) virtio_mem_mb_remove() argument
460 virtio_mem_mb_offline_and_remove(struct virtio_mem *vm, unsigned long mb_id) virtio_mem_mb_offline_and_remove() argument
477 virtio_mem_retry(struct virtio_mem *vm) virtio_mem_retry() argument
487 virtio_mem_translate_node_id(struct virtio_mem *vm, uint16_t node_id) virtio_mem_translate_node_id() argument
502 virtio_mem_overlaps_range(struct virtio_mem *vm, unsigned long start, unsigned long size) virtio_mem_overlaps_range() argument
516 virtio_mem_owned_mb(struct virtio_mem *vm, unsigned long mb_id) virtio_mem_owned_mb() argument
521 virtio_mem_notify_going_online(struct virtio_mem *vm, unsigned long mb_id) virtio_mem_notify_going_online() argument
536 virtio_mem_notify_offline(struct virtio_mem *vm, unsigned long mb_id) virtio_mem_notify_offline() argument
563 virtio_mem_notify_online(struct virtio_mem *vm, unsigned long mb_id) virtio_mem_notify_online() argument
587 virtio_mem_notify_going_offline(struct virtio_mem *vm, unsigned long mb_id) virtio_mem_notify_going_offline() argument
615 virtio_mem_notify_cancel_offline(struct virtio_mem *vm, unsigned long mb_id) virtio_mem_notify_cancel_offline() argument
646 struct virtio_mem *vm = container_of(nb, struct virtio_mem, virtio_mem_memory_notifier_cb() local
801 struct virtio_mem *vm; virtio_mem_online_page_cb() local
834 virtio_mem_send_request(struct virtio_mem *vm, const struct virtio_mem_req *req) virtio_mem_send_request() argument
864 virtio_mem_send_plug_request(struct virtio_mem *vm, uint64_t addr, uint64_t size) virtio_mem_send_plug_request() argument
892 virtio_mem_send_unplug_request(struct virtio_mem *vm, uint64_t addr, uint64_t size) virtio_mem_send_unplug_request() argument
918 virtio_mem_send_unplug_all_request(struct virtio_mem *vm) virtio_mem_send_unplug_all_request() argument
942 virtio_mem_mb_plug_sb(struct virtio_mem *vm, unsigned long mb_id, int sb_id, int count) virtio_mem_mb_plug_sb() argument
963 virtio_mem_mb_unplug_sb(struct virtio_mem *vm, unsigned long mb_id, int sb_id, int count) virtio_mem_mb_unplug_sb() argument
989 virtio_mem_mb_unplug_any_sb(struct virtio_mem *vm, unsigned long mb_id, uint64_t *nb_sb) virtio_mem_mb_unplug_any_sb() argument
1028 virtio_mem_mb_unplug(struct virtio_mem *vm, unsigned long mb_id) virtio_mem_mb_unplug() argument
1038 virtio_mem_prepare_next_mb(struct virtio_mem *vm, unsigned long *mb_id) virtio_mem_prepare_next_mb() argument
1064 virtio_mem_too_many_mb_offline(struct virtio_mem *vm) virtio_mem_too_many_mb_offline() argument
1079 virtio_mem_mb_plug_and_add(struct virtio_mem *vm, unsigned long mb_id, uint64_t *nb_sb) virtio_mem_mb_plug_and_add() argument
1139 virtio_mem_mb_plug_any_sb(struct virtio_mem *vm, unsigned long mb_id, uint64_t *nb_sb, bool online) virtio_mem_mb_plug_any_sb() argument
1189 virtio_mem_plug_request(struct virtio_mem *vm, uint64_t diff) virtio_mem_plug_request() argument
1265 virtio_mem_mb_unplug_any_sb_offline(struct virtio_mem *vm, unsigned long mb_id, uint64_t *nb_sb) virtio_mem_mb_unplug_any_sb_offline() argument
1303 virtio_mem_mb_unplug_sb_online(struct virtio_mem *vm, unsigned long mb_id, int sb_id, int count) virtio_mem_mb_unplug_sb_online() argument
1348 virtio_mem_mb_unplug_any_sb_online(struct virtio_mem *vm, unsigned long mb_id, uint64_t *nb_sb) virtio_mem_mb_unplug_any_sb_online() argument
1404 virtio_mem_unplug_request(struct virtio_mem *vm, uint64_t diff) virtio_mem_unplug_request() argument
1480 virtio_mem_unplug_pending_mb(struct virtio_mem *vm) virtio_mem_unplug_pending_mb() argument
1498 virtio_mem_refresh_config(struct virtio_mem *vm) virtio_mem_refresh_config() argument
1529 struct virtio_mem *vm = container_of(work, struct virtio_mem, wq); virtio_mem_run_wq() local
1602 struct virtio_mem *vm = container_of(timer, struct virtio_mem, virtio_mem_timer_expired() local
1613 struct virtio_mem *vm = vq->vdev->priv; virtio_mem_handle_response() local
1618 virtio_mem_init_vq(struct virtio_mem *vm) virtio_mem_init_vq() argument
1631 virtio_mem_init(struct virtio_mem *vm) virtio_mem_init() argument
1716 virtio_mem_create_resource(struct virtio_mem *vm) virtio_mem_create_resource() argument
1742 virtio_mem_delete_resource(struct virtio_mem *vm) virtio_mem_delete_resource() argument
1758 struct virtio_mem *vm; virtio_mem_probe() local
1834 struct virtio_mem *vm = vdev->priv; virtio_mem_remove() local
1901 struct virtio_mem *vm = vdev->priv; virtio_mem_config_changed() local
[all...]
/kernel/linux/linux-5.10/tools/testing/selftests/kvm/lib/aarch64/
processor.c
19 static uint64_t page_align(struct kvm_vm *vm, uint64_t v) in page_align() argument
21 return (v + vm->page_size) & ~(vm->page_size - 1); in page_align()
24 static uint64_t pgd_index(struct kvm_vm *vm, vm_vaddr_t gva) in pgd_index() argument
26 unsigned int shift = (vm->pgtable_levels - 1) * (vm->page_shift - 3) + vm->page_shift; in pgd_index()
27 uint64_t mask = (1UL << (vm->va_bits - shift)) - 1; in pgd_index()
32 static uint64_t pud_index(struct kvm_vm *vm, vm_vaddr_t gva) in pud_index() argument
34 unsigned int shift = 2 * (vm in pud_index()
43 pmd_index(struct kvm_vm *vm, vm_vaddr_t gva) pmd_index() argument
54 pte_index(struct kvm_vm *vm, vm_vaddr_t gva) pte_index() argument
60 pte_addr(struct kvm_vm *vm, uint64_t entry) pte_addr() argument
66 ptrs_per_pgd(struct kvm_vm *vm) ptrs_per_pgd() argument
72 ptrs_per_pte(struct kvm_vm *vm) ptrs_per_pte() argument
77 virt_pgd_alloc(struct kvm_vm *vm, uint32_t pgd_memslot) virt_pgd_alloc() argument
88 _virt_pg_map(struct kvm_vm *vm, uint64_t vaddr, uint64_t paddr, uint32_t pgd_memslot, uint64_t flags) _virt_pg_map() argument
140 virt_pg_map(struct kvm_vm *vm, uint64_t vaddr, uint64_t paddr, uint32_t pgd_memslot) virt_pg_map() argument
148 addr_gva2gpa(struct kvm_vm *vm, vm_vaddr_t gva) addr_gva2gpa() argument
186 pte_dump(FILE *stream, struct kvm_vm *vm, uint8_t indent, uint64_t page, int level) pte_dump() argument
205 virt_dump(FILE *stream, struct kvm_vm *vm, uint8_t indent) virt_dump() argument
227 struct kvm_vm *vm; vm_create_default() local
237 aarch64_vcpu_setup(struct kvm_vm *vm, int vcpuid, struct kvm_vcpu_init *init) aarch64_vcpu_setup() argument
304 vcpu_dump(FILE *stream, struct kvm_vm *vm, uint32_t vcpuid, uint8_t indent) vcpu_dump() argument
315 aarch64_vcpu_add_default(struct kvm_vm *vm, uint32_t vcpuid, struct kvm_vcpu_init *init, void *guest_code) aarch64_vcpu_add_default() argument
331 vm_vcpu_add_default(struct kvm_vm *vm, uint32_t vcpuid, void *guest_code) vm_vcpu_add_default() argument
336 vcpu_args_set(struct kvm_vm *vm, uint32_t vcpuid, unsigned int num, ...) vcpu_args_set() argument
354 assert_on_unhandled_exception(struct kvm_vm *vm, uint32_t vcpuid) assert_on_unhandled_exception() argument
[all...]
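The aarch64 selftest helpers shown above are simple index math. The standalone sketch below reproduces page_align() and pgd_index(), assuming 4 KiB pages, four translation levels, and 48-bit virtual addresses (the real library reads these from struct kvm_vm), and completing pgd_index() with the natural `(gva >> shift) & mask` return that the truncated snippet omits:

```c
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* Assumed geometry: 4 KiB pages, four levels, 48-bit virtual addresses. */
#define PAGE_SHIFT     12
#define PAGE_SIZE      (1ULL << PAGE_SHIFT)
#define PGTABLE_LEVELS 4
#define VA_BITS        48

/* Same arithmetic as the library's page_align(). */
static uint64_t page_align(uint64_t v)
{
	return (v + PAGE_SIZE) & ~(PAGE_SIZE - 1);
}

/* Same shift/mask arithmetic as pgd_index(); the return expression is not
 * visible in the snippet and is filled in here as (gva >> shift) & mask. */
static uint64_t pgd_index(uint64_t gva)
{
	unsigned int shift = (PGTABLE_LEVELS - 1) * (PAGE_SHIFT - 3) + PAGE_SHIFT;
	uint64_t mask = (1ULL << (VA_BITS - shift)) - 1;

	return (gva >> shift) & mask;
}

int main(void)
{
	printf("page_align(0x1001) = 0x%" PRIx64 "\n", page_align(0x1001));
	printf("pgd_index(2^47 - 1) = %" PRIu64 "\n", pgd_index((1ULL << 47) - 1));
	return 0;
}
```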
/kernel/linux/linux-6.6/tools/testing/selftests/kvm/lib/aarch64/
processor.c
20 static uint64_t page_align(struct kvm_vm *vm, uint64_t v) in page_align() argument
22 return (v + vm->page_size) & ~(vm->page_size - 1); in page_align()
25 static uint64_t pgd_index(struct kvm_vm *vm, vm_vaddr_t gva) in pgd_index() argument
27 unsigned int shift = (vm->pgtable_levels - 1) * (vm->page_shift - 3) + vm->page_shift; in pgd_index()
28 uint64_t mask = (1UL << (vm->va_bits - shift)) - 1; in pgd_index()
33 static uint64_t pud_index(struct kvm_vm *vm, vm_vaddr_t gva) in pud_index() argument
35 unsigned int shift = 2 * (vm in pud_index()
44 pmd_index(struct kvm_vm *vm, vm_vaddr_t gva) pmd_index() argument
55 pte_index(struct kvm_vm *vm, vm_vaddr_t gva) pte_index() argument
61 addr_pte(struct kvm_vm *vm, uint64_t pa, uint64_t attrs) addr_pte() argument
73 pte_addr(struct kvm_vm *vm, uint64_t pte) pte_addr() argument
84 ptrs_per_pgd(struct kvm_vm *vm) ptrs_per_pgd() argument
90 ptrs_per_pte(struct kvm_vm *vm) ptrs_per_pte() argument
95 virt_arch_pgd_alloc(struct kvm_vm *vm) virt_arch_pgd_alloc() argument
108 _virt_pg_map(struct kvm_vm *vm, uint64_t vaddr, uint64_t paddr, uint64_t flags) _virt_pg_map() argument
153 virt_arch_pg_map(struct kvm_vm *vm, uint64_t vaddr, uint64_t paddr) virt_arch_pg_map() argument
160 virt_get_pte_hva(struct kvm_vm *vm, vm_vaddr_t gva) virt_get_pte_hva() argument
198 addr_arch_gva2gpa(struct kvm_vm *vm, vm_vaddr_t gva) addr_arch_gva2gpa() argument
205 pte_dump(FILE *stream, struct kvm_vm *vm, uint8_t indent, uint64_t page, int level) pte_dump() argument
224 virt_arch_dump(FILE *stream, struct kvm_vm *vm, uint8_t indent) virt_arch_dump() argument
244 struct kvm_vm *vm = vcpu->vm; aarch64_vcpu_setup() local
347 aarch64_vcpu_add(struct kvm_vm *vm, uint32_t vcpu_id, struct kvm_vcpu_init *init, void *guest_code) aarch64_vcpu_add() argument
368 vm_arch_vcpu_add(struct kvm_vm *vm, uint32_t vcpu_id, void *guest_code) vm_arch_vcpu_add() argument
461 vm_init_descriptor_tables(struct kvm_vm *vm) vm_init_descriptor_tables() argument
469 vm_install_sync_handler(struct kvm_vm *vm, int vector, int ec, void (*handler)(struct ex_regs *)) vm_install_sync_handler() argument
480 vm_install_exception_handler(struct kvm_vm *vm, int vector, void (*handler)(struct ex_regs *)) vm_install_exception_handler() argument
578 vm_vaddr_populate_bitmap(struct kvm_vm *vm) vm_vaddr_populate_bitmap() argument
[all...]
/kernel/linux/linux-5.10/tools/testing/selftests/kvm/lib/
kvm_util.c
69 * vm - Virtual Machine
78 int vm_enable_cap(struct kvm_vm *vm, struct kvm_enable_cap *cap) in vm_enable_cap() argument
82 ret = ioctl(vm->fd, KVM_ENABLE_CAP, cap); in vm_enable_cap()
92 * vm - Virtual Machine
102 int vcpu_enable_cap(struct kvm_vm *vm, uint32_t vcpu_id, in vcpu_enable_cap() argument
105 struct vcpu *vcpu = vcpu_find(vm, vcpu_id); in vcpu_enable_cap()
117 static void vm_open(struct kvm_vm *vm, int perm) in vm_open() argument
119 vm->kvm_fd = open(KVM_DEV_PATH, perm); in vm_open()
120 if (vm->kvm_fd < 0) in vm_open()
128 vm in vm_open()
185 struct kvm_vm *vm; vm_create() local
308 kvm_vm_get_dirty_log(struct kvm_vm *vm, int slot, void *log) kvm_vm_get_dirty_log() argument
318 kvm_vm_clear_dirty_log(struct kvm_vm *vm, int slot, void *log, uint64_t first_page, uint32_t num_pages) kvm_vm_clear_dirty_log() argument
351 userspace_mem_region_find(struct kvm_vm *vm, uint64_t start, uint64_t end) userspace_mem_region_find() argument
383 kvm_userspace_memory_region_find(struct kvm_vm *vm, uint64_t start, uint64_t end) kvm_userspace_memory_region_find() argument
411 vcpu_find(struct kvm_vm *vm, uint32_t vcpuid) vcpu_find() argument
467 __vm_mem_region_delete(struct kvm_vm *vm, struct userspace_mem_region *region) __vm_mem_region_delete() argument
533 kvm_memcmp_hva_gva(void *hva, struct kvm_vm *vm, vm_vaddr_t gva, size_t len) kvm_memcmp_hva_gva() argument
602 vm_userspace_mem_region_add(struct kvm_vm *vm, enum vm_mem_backing_src_type src_type, uint64_t guest_paddr, uint32_t slot, uint64_t npages, uint32_t flags) vm_userspace_mem_region_add() argument
744 memslot2region(struct kvm_vm *vm, uint32_t memslot) memslot2region() argument
775 vm_mem_region_set_flags(struct kvm_vm *vm, uint32_t slot, uint32_t flags) vm_mem_region_set_flags() argument
805 vm_mem_region_move(struct kvm_vm *vm, uint32_t slot, uint64_t new_gpa) vm_mem_region_move() argument
834 vm_mem_region_delete(struct kvm_vm *vm, uint32_t slot) vm_mem_region_delete() argument
884 vm_vcpu_add(struct kvm_vm *vm, uint32_t vcpuid) vm_vcpu_add() argument
937 vm_vaddr_unused_gap(struct kvm_vm *vm, size_t sz, vm_vaddr_t vaddr_min) vm_vaddr_unused_gap() argument
1024 vm_vaddr_alloc(struct kvm_vm *vm, size_t sz, vm_vaddr_t vaddr_min, uint32_t data_memslot, uint32_t pgd_memslot) vm_vaddr_alloc() argument
1071 virt_map(struct kvm_vm *vm, uint64_t vaddr, uint64_t paddr, unsigned int npages, uint32_t pgd_memslot) virt_map() argument
1104 addr_gpa2hva(struct kvm_vm *vm, vm_paddr_t gpa) addr_gpa2hva() argument
1137 addr_hva2gpa(struct kvm_vm *vm, void *hva) addr_hva2gpa() argument
1166 vm_create_irqchip(struct kvm_vm *vm) vm_create_irqchip() argument
1192 vcpu_state(struct kvm_vm *vm, uint32_t vcpuid) vcpu_state() argument
1214 vcpu_run(struct kvm_vm *vm, uint32_t vcpuid) vcpu_run() argument
1221 _vcpu_run(struct kvm_vm *vm, uint32_t vcpuid) _vcpu_run() argument
1236 vcpu_run_complete_io(struct kvm_vm *vm, uint32_t vcpuid) vcpu_run_complete_io() argument
1252 vcpu_set_guest_debug(struct kvm_vm *vm, uint32_t vcpuid, struct kvm_guest_debug *debug) vcpu_set_guest_debug() argument
1276 vcpu_set_mp_state(struct kvm_vm *vm, uint32_t vcpuid, struct kvm_mp_state *mp_state) vcpu_set_mp_state() argument
1305 vcpu_get_reg_list(struct kvm_vm *vm, uint32_t vcpuid) vcpu_get_reg_list() argument
1333 vcpu_regs_get(struct kvm_vm *vm, uint32_t vcpuid, struct kvm_regs *regs) vcpu_regs_get() argument
1360 vcpu_regs_set(struct kvm_vm *vm, uint32_t vcpuid, struct kvm_regs *regs) vcpu_regs_set() argument
1373 vcpu_events_get(struct kvm_vm *vm, uint32_t vcpuid, struct kvm_vcpu_events *events) vcpu_events_get() argument
1386 vcpu_events_set(struct kvm_vm *vm, uint32_t vcpuid, struct kvm_vcpu_events *events) vcpu_events_set() argument
1401 vcpu_nested_state_get(struct kvm_vm *vm, uint32_t vcpuid, struct kvm_nested_state *state) vcpu_nested_state_get() argument
1415 vcpu_nested_state_set(struct kvm_vm *vm, uint32_t vcpuid, struct kvm_nested_state *state, bool ignore_error) vcpu_nested_state_set() argument
1449 vcpu_sregs_get(struct kvm_vm *vm, uint32_t vcpuid, struct kvm_sregs *sregs) vcpu_sregs_get() argument
1476 vcpu_sregs_set(struct kvm_vm *vm, uint32_t vcpuid, struct kvm_sregs *sregs) vcpu_sregs_set() argument
1483 _vcpu_sregs_set(struct kvm_vm *vm, uint32_t vcpuid, struct kvm_sregs *sregs) _vcpu_sregs_set() argument
1492 vcpu_fpu_get(struct kvm_vm *vm, uint32_t vcpuid, struct kvm_fpu *fpu) vcpu_fpu_get() argument
1501 vcpu_fpu_set(struct kvm_vm *vm, uint32_t vcpuid, struct kvm_fpu *fpu) vcpu_fpu_set() argument
1510 vcpu_get_reg(struct kvm_vm *vm, uint32_t vcpuid, struct kvm_one_reg *reg) vcpu_get_reg() argument
1519 vcpu_set_reg(struct kvm_vm *vm, uint32_t vcpuid, struct kvm_one_reg *reg) vcpu_set_reg() argument
1541 vcpu_ioctl(struct kvm_vm *vm, uint32_t vcpuid, unsigned long cmd, void *arg) vcpu_ioctl() argument
1551 _vcpu_ioctl(struct kvm_vm *vm, uint32_t vcpuid, unsigned long cmd, void *arg) _vcpu_ioctl() argument
1576 vm_ioctl(struct kvm_vm *vm, unsigned long cmd, void *arg) vm_ioctl() argument
1600 vm_dump(FILE *stream, struct kvm_vm *vm, uint8_t indent) vm_dump() argument
1708 vm_phy_pages_alloc(struct kvm_vm *vm, size_t num, vm_paddr_t paddr_min, uint32_t memslot) vm_phy_pages_alloc() argument
1748 vm_phy_page_alloc(struct kvm_vm *vm, vm_paddr_t paddr_min, uint32_t memslot) vm_phy_page_alloc() argument
1766 addr_gva2hva(struct kvm_vm *vm, vm_vaddr_t gva) addr_gva2hva() argument
1783 vm_is_unrestricted_guest(struct kvm_vm *vm) vm_is_unrestricted_guest() argument
1807 vm_get_page_size(struct kvm_vm *vm) vm_get_page_size() argument
1812 vm_get_page_shift(struct kvm_vm *vm) vm_get_page_shift() argument
1817 vm_get_max_gfn(struct kvm_vm *vm) vm_get_max_gfn() argument
1822 vm_get_fd(struct kvm_vm *vm) vm_get_fd() argument
[all...]
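vm_enable_cap() in this library is a thin wrapper around the KVM_ENABLE_CAP ioctl on a VM file descriptor obtained through /dev/kvm, as the vm_open()/vm_enable_cap() hits above show. A hedged standalone sketch of the same pattern follows; the capability chosen (KVM_CAP_HALT_POLL) is only an example, and whether a given capability can be enabled depends on the running kernel:

```c
#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/kvm.h>

/* Same shape as the library's vm_enable_cap(): a thin ioctl wrapper. */
static int vm_enable_cap(int vm_fd, struct kvm_enable_cap *cap)
{
	return ioctl(vm_fd, KVM_ENABLE_CAP, cap);
}

int main(void)
{
	int kvm_fd = open("/dev/kvm", O_RDWR);
	if (kvm_fd < 0) {
		perror("open /dev/kvm");
		return 1;
	}

	int vm_fd = ioctl(kvm_fd, KVM_CREATE_VM, 0);
	if (vm_fd < 0) {
		perror("KVM_CREATE_VM");
		return 1;
	}

	/* Example only: which capabilities accept KVM_ENABLE_CAP varies by kernel. */
	struct kvm_enable_cap cap;
	memset(&cap, 0, sizeof(cap));
	cap.cap = KVM_CAP_HALT_POLL;
	cap.args[0] = 100000; /* max halt-poll time in ns */

	if (vm_enable_cap(vm_fd, &cap))
		perror("KVM_ENABLE_CAP");
	else
		printf("capability enabled\n");

	close(vm_fd);
	close(kvm_fd);
	return 0;
}
```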
/kernel/linux/linux-6.6/tools/testing/selftests/kvm/s390x/
cmma_test.c
102 static void create_main_memslot(struct kvm_vm *vm) in create_main_memslot() argument
106 vm_userspace_mem_region_add(vm, VM_MEM_SRC_ANONYMOUS, 0, 0, MAIN_PAGE_COUNT, 0); in create_main_memslot()
109 vm->memslots[i] = 0; in create_main_memslot()
112 static void create_test_memslot(struct kvm_vm *vm) in create_test_memslot() argument
114 vm_userspace_mem_region_add(vm, in create_test_memslot()
116 TEST_DATA_START_GFN << vm->page_shift, in create_test_memslot()
121 vm->memslots[MEM_REGION_TEST_DATA] = TEST_DATA_MEMSLOT; in create_test_memslot()
124 static void create_memslots(struct kvm_vm *vm) in create_memslots() argument
140 create_main_memslot(vm); in create_memslots()
141 create_test_memslot(vm); in create_memslots()
144 finish_vm_setup(struct kvm_vm *vm) finish_vm_setup() argument
158 struct kvm_vm *vm; create_vm_two_memslots() local
169 enable_cmma(struct kvm_vm *vm) enable_cmma() argument
177 enable_dirty_tracking(struct kvm_vm *vm) enable_dirty_tracking() argument
183 __enable_migration_mode(struct kvm_vm *vm) __enable_migration_mode() argument
192 enable_migration_mode(struct kvm_vm *vm) enable_migration_mode() argument
199 is_migration_mode_on(struct kvm_vm *vm) is_migration_mode_on() argument
213 vm_get_cmma_bits(struct kvm_vm *vm, u64 flags, int *errno_out) vm_get_cmma_bits() argument
234 struct kvm_vm *vm = create_vm_two_memslots(); test_get_cmma_basic() local
279 struct kvm_vm *vm = create_vm(); test_migration_mode() local
381 assert_all_slots_cmma_dirty(struct kvm_vm *vm) assert_all_slots_cmma_dirty() argument
431 assert_no_pages_cmma_dirty(struct kvm_vm *vm) assert_no_pages_cmma_dirty() argument
454 struct kvm_vm *vm = create_vm_two_memslots(); test_get_inital_dirty() local
478 query_cmma_range(struct kvm_vm *vm, u64 start_gfn, u64 gfn_count, struct kvm_s390_cmma_log *res_out) query_cmma_range() argument
511 struct kvm_vm *vm = create_vm_two_memslots(); test_get_skip_holes() local
673 struct kvm_vm *vm = create_vm(); machine_has_cmma() local
[all...]
/kernel/linux/linux-5.10/drivers/gpu/drm/lima/
lima_vm.c
18 struct lima_vm *vm; member
35 static void lima_vm_unmap_range(struct lima_vm *vm, u32 start, u32 end) in lima_vm_unmap_range() argument
43 vm->bts[pbe].cpu[bte] = 0; in lima_vm_unmap_range()
47 static int lima_vm_map_page(struct lima_vm *vm, dma_addr_t pa, u32 va) in lima_vm_map_page() argument
52 if (!vm->bts[pbe].cpu) { in lima_vm_map_page()
57 vm->bts[pbe].cpu = dma_alloc_wc( in lima_vm_map_page()
58 vm->dev->dev, LIMA_PAGE_SIZE << LIMA_VM_NUM_PT_PER_BT_SHIFT, in lima_vm_map_page()
59 &vm->bts[pbe].dma, GFP_KERNEL | __GFP_NOWARN | __GFP_ZERO); in lima_vm_map_page()
60 if (!vm->bts[pbe].cpu) in lima_vm_map_page()
63 pts = vm in lima_vm_map_page()
77 lima_vm_bo_find(struct lima_vm *vm, struct lima_bo *bo) lima_vm_bo_find() argument
91 lima_vm_bo_add(struct lima_vm *vm, struct lima_bo *bo, bool create) lima_vm_bo_add() argument
155 lima_vm_bo_del(struct lima_vm *vm, struct lima_bo *bo) lima_vm_bo_del() argument
185 lima_vm_get_va(struct lima_vm *vm, struct lima_bo *bo) lima_vm_get_va() argument
202 struct lima_vm *vm; lima_vm_create() local
237 struct lima_vm *vm = container_of(kref, struct lima_vm, refcount); lima_vm_release() local
254 lima_vm_print(struct lima_vm *vm) lima_vm_print() argument
283 lima_vm_map_bo(struct lima_vm *vm, struct lima_bo *bo, int pageoff) lima_vm_map_bo() argument
[all...]
/kernel/linux/linux-6.6/drivers/gpu/drm/lima/
lima_vm.c
18 struct lima_vm *vm; member
35 static void lima_vm_unmap_range(struct lima_vm *vm, u32 start, u32 end) in lima_vm_unmap_range() argument
43 vm->bts[pbe].cpu[bte] = 0; in lima_vm_unmap_range()
47 static int lima_vm_map_page(struct lima_vm *vm, dma_addr_t pa, u32 va) in lima_vm_map_page() argument
52 if (!vm->bts[pbe].cpu) { in lima_vm_map_page()
57 vm->bts[pbe].cpu = dma_alloc_wc( in lima_vm_map_page()
58 vm->dev->dev, LIMA_PAGE_SIZE << LIMA_VM_NUM_PT_PER_BT_SHIFT, in lima_vm_map_page()
59 &vm->bts[pbe].dma, GFP_KERNEL | __GFP_NOWARN | __GFP_ZERO); in lima_vm_map_page()
60 if (!vm->bts[pbe].cpu) in lima_vm_map_page()
63 pts = vm in lima_vm_map_page()
77 lima_vm_bo_find(struct lima_vm *vm, struct lima_bo *bo) lima_vm_bo_find() argument
91 lima_vm_bo_add(struct lima_vm *vm, struct lima_bo *bo, bool create) lima_vm_bo_add() argument
155 lima_vm_bo_del(struct lima_vm *vm, struct lima_bo *bo) lima_vm_bo_del() argument
185 lima_vm_get_va(struct lima_vm *vm, struct lima_bo *bo) lima_vm_get_va() argument
202 struct lima_vm *vm; lima_vm_create() local
237 struct lima_vm *vm = container_of(kref, struct lima_vm, refcount); lima_vm_release() local
254 lima_vm_print(struct lima_vm *vm) lima_vm_print() argument
283 lima_vm_map_bo(struct lima_vm *vm, struct lima_bo *bo, int pageoff) lima_vm_map_bo() argument
[all...]
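lima_vm_map_page() above allocates a second-level table block on first touch and only then fills the page-table entry. The toy model below shows that allocate-on-demand shape in plain C; the 10/10/12 virtual-address split and the "present" bit are assumptions for illustration, not the real Mali page-table format:

```c
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Assumed 32-bit VA split: 10-bit directory index, 10-bit table index,
 * 12-bit page offset. The real lima geometry is not shown in the snippet. */
#define DIR_BITS   10
#define TBL_BITS   10
#define PAGE_SHIFT 12

struct toy_vm {
	uint32_t *tables[1 << DIR_BITS]; /* second-level tables, allocated on demand */
};

static int toy_map_page(struct toy_vm *vm, uint32_t pa, uint32_t va)
{
	uint32_t dir = va >> (TBL_BITS + PAGE_SHIFT);
	uint32_t idx = (va >> PAGE_SHIFT) & ((1u << TBL_BITS) - 1);

	/* Like lima_vm_map_page(): allocate the second level only when first used. */
	if (!vm->tables[dir]) {
		vm->tables[dir] = calloc(1u << TBL_BITS, sizeof(uint32_t));
		if (!vm->tables[dir])
			return -1;
	}
	vm->tables[dir][idx] = pa | 1u; /* low bit as an illustrative "present" flag */
	return 0;
}

int main(void)
{
	static struct toy_vm vm;

	if (toy_map_page(&vm, 0x12345000u, 0x00400000u))
		return 1;
	printf("entry: 0x%" PRIx32 "\n", vm.tables[0x00400000u >> 22][0]);
	return 0;
}
```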
/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdgpu/
amdgpu_vm.c
119 * @vm: pointer to the amdgpu_vm structure to set the fence sequence on
121 struct amdgpu_vm *vm; member
130 * amdgpu_vm_set_pasid - manage pasid and vm ptr mapping
133 * @vm: amdgpu_vm pointer
140 int amdgpu_vm_set_pasid(struct amdgpu_device *adev, struct amdgpu_vm *vm, in amdgpu_vm_set_pasid() argument
145 if (vm->pasid == pasid) in amdgpu_vm_set_pasid()
148 if (vm->pasid) { in amdgpu_vm_set_pasid()
149 r = xa_err(xa_erase_irq(&adev->vm_manager.pasids, vm->pasid)); in amdgpu_vm_set_pasid()
153 vm->pasid = 0; in amdgpu_vm_set_pasid()
157 r = xa_err(xa_store_irq(&adev->vm_manager.pasids, pasid, vm, in amdgpu_vm_set_pasid()
179 struct amdgpu_vm *vm = vm_bo->vm; amdgpu_vm_bo_evicted() local
277 amdgpu_vm_bo_reset_state_machine(struct amdgpu_vm *vm) amdgpu_vm_bo_reset_state_machine() argument
307 amdgpu_vm_bo_base_init(struct amdgpu_vm_bo_base *base, struct amdgpu_vm *vm, struct amdgpu_bo *bo) amdgpu_vm_bo_base_init() argument
352 amdgpu_vm_lock_pd(struct amdgpu_vm *vm, struct drm_exec *exec, unsigned int num_fences) amdgpu_vm_lock_pd() argument
369 amdgpu_vm_move_to_lru_tail(struct amdgpu_device *adev, struct amdgpu_vm *vm) amdgpu_vm_move_to_lru_tail() argument
378 amdgpu_vm_init_entities(struct amdgpu_device *adev, struct amdgpu_vm *vm) amdgpu_vm_init_entities() argument
399 amdgpu_vm_fini_entities(struct amdgpu_vm *vm) amdgpu_vm_fini_entities() argument
414 amdgpu_vm_generation(struct amdgpu_device *adev, struct amdgpu_vm *vm) amdgpu_vm_generation() argument
442 amdgpu_vm_validate_pt_bos(struct amdgpu_device *adev, struct amdgpu_vm *vm, int (*validate)(void *p, struct amdgpu_bo *bo), void *param) amdgpu_vm_validate_pt_bos() argument
506 amdgpu_vm_ready(struct amdgpu_vm *vm) amdgpu_vm_ready() argument
715 amdgpu_vm_bo_find(struct amdgpu_vm *vm, struct amdgpu_bo *bo) amdgpu_vm_bo_find() argument
768 amdgpu_vm_update_pdes(struct amdgpu_device *adev, struct amdgpu_vm *vm, bool immediate) amdgpu_vm_update_pdes() argument
863 amdgpu_vm_update_range(struct amdgpu_device *adev, struct amdgpu_vm *vm, bool immediate, bool unlocked, bool flush_tlb, struct dma_resv *resv, uint64_t start, uint64_t last, uint64_t flags, uint64_t offset, uint64_t vram_base, struct ttm_resource *res, dma_addr_t *pages_addr, struct dma_fence **fence) amdgpu_vm_update_range() argument
1010 struct amdgpu_vm *vm = bo_va->base.vm; amdgpu_vm_bo_get_memory() local
1029 amdgpu_vm_get_memory(struct amdgpu_vm *vm, struct amdgpu_mem_stats *stats) amdgpu_vm_get_memory() argument
1071 struct amdgpu_vm *vm = bo_va->base.vm; amdgpu_vm_bo_update() local
1280 amdgpu_vm_free_mapping(struct amdgpu_device *adev, struct amdgpu_vm *vm, struct amdgpu_bo_va_mapping *mapping, struct dma_fence *fence) amdgpu_vm_free_mapping() argument
1298 amdgpu_vm_prt_fini(struct amdgpu_device *adev, struct amdgpu_vm *vm) amdgpu_vm_prt_fini() argument
1326 amdgpu_vm_clear_freed(struct amdgpu_device *adev, struct amdgpu_vm *vm, struct dma_fence **fence) amdgpu_vm_clear_freed() argument
1380 amdgpu_vm_handle_moved(struct amdgpu_device *adev, struct amdgpu_vm *vm) amdgpu_vm_handle_moved() argument
1442 amdgpu_vm_bo_add(struct amdgpu_device *adev, struct amdgpu_vm *vm, struct amdgpu_bo *bo) amdgpu_vm_bo_add() argument
1486 struct amdgpu_vm *vm = bo_va->base.vm; amdgpu_vm_bo_insert_map() local
1527 struct amdgpu_vm *vm = bo_va->base.vm; amdgpu_vm_bo_map() local
1651 struct amdgpu_vm *vm = bo_va->base.vm; amdgpu_vm_bo_unmap() local
1700 amdgpu_vm_bo_clear_mappings(struct amdgpu_device *adev, struct amdgpu_vm *vm, uint64_t saddr, uint64_t size) amdgpu_vm_bo_clear_mappings() argument
1815 amdgpu_vm_bo_lookup_mapping(struct amdgpu_vm *vm, uint64_t addr) amdgpu_vm_bo_lookup_mapping() argument
1829 amdgpu_vm_bo_trace_cs(struct amdgpu_vm *vm, struct ww_acquire_ctx *ticket) amdgpu_vm_bo_trace_cs() argument
1866 struct amdgpu_vm *vm = bo_va->base.vm; amdgpu_vm_bo_del() local
1965 struct amdgpu_vm *vm = bo_base->vm; amdgpu_vm_bo_invalidate() local
2106 amdgpu_vm_wait_idle(struct amdgpu_vm *vm, long timeout) amdgpu_vm_wait_idle() argument
2129 amdgpu_vm_init(struct amdgpu_device *adev, struct amdgpu_vm *vm, int32_t xcp_id) amdgpu_vm_init() argument
2239 amdgpu_vm_make_compute(struct amdgpu_device *adev, struct amdgpu_vm *vm) amdgpu_vm_make_compute() argument
2312 amdgpu_vm_release_compute(struct amdgpu_device *adev, struct amdgpu_vm *vm) amdgpu_vm_release_compute() argument
2327 amdgpu_vm_fini(struct amdgpu_device *adev, struct amdgpu_vm *vm) amdgpu_vm_fini() argument
2507 struct amdgpu_vm *vm; amdgpu_vm_get_task_info() local
2524 amdgpu_vm_set_task_info(struct amdgpu_vm *vm) amdgpu_vm_set_task_info() argument
2560 struct amdgpu_vm *vm; amdgpu_vm_handle_fault() local
2650 amdgpu_debugfs_vm_bo_info(struct amdgpu_vm *vm, struct seq_file *m) amdgpu_debugfs_vm_bo_info() argument
[all...]
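amdgpu_vm_set_pasid() follows a simple replace pattern visible in the hits above: return early if the pasid is unchanged, erase any existing mapping, then store the new one. The driver keeps the mapping in an xarray; the sketch below models the same flow with a plain array and an invented toy_vm type:

```c
#include <stdio.h>

#define MAX_PASID 64 /* toy bound; the real driver uses an xarray keyed by pasid */

struct toy_vm {
	unsigned int pasid; /* 0 means "no pasid assigned" */
};

static struct toy_vm *pasid_table[MAX_PASID];

/* Follows the visible flow of amdgpu_vm_set_pasid(): no-op if unchanged,
 * drop the old mapping, then install the new one. */
static int vm_set_pasid(struct toy_vm *vm, unsigned int pasid)
{
	if (vm->pasid == pasid)
		return 0;

	if (vm->pasid) {
		pasid_table[vm->pasid] = NULL;
		vm->pasid = 0;
	}

	if (pasid) {
		if (pasid >= MAX_PASID || pasid_table[pasid])
			return -1; /* out of range or already taken */
		pasid_table[pasid] = vm;
		vm->pasid = pasid;
	}
	return 0;
}

int main(void)
{
	struct toy_vm vm = { 0 };

	vm_set_pasid(&vm, 7);
	printf("pasid 7 -> vm %p\n", (void *)pasid_table[7]);
	vm_set_pasid(&vm, 0); /* detach */
	printf("pasid 7 -> vm %p\n", (void *)pasid_table[7]);
	return 0;
}
```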
/kernel/linux/linux-5.10/drivers/gpu/drm/i915/selftests/
mock_gtt.c
27 static void mock_insert_page(struct i915_address_space *vm, in mock_insert_page() argument
35 static void mock_insert_entries(struct i915_address_space *vm, in mock_insert_entries() argument
41 static void mock_bind_ppgtt(struct i915_address_space *vm, in mock_bind_ppgtt() argument
51 static void mock_unbind_ppgtt(struct i915_address_space *vm, in mock_unbind_ppgtt() argument
56 static void mock_cleanup(struct i915_address_space *vm) in mock_cleanup() argument
60 static void mock_clear_range(struct i915_address_space *vm, in mock_clear_range() argument
73 ppgtt->vm.gt = &i915->gt; in mock_ppgtt()
74 ppgtt->vm.i915 = i915; in mock_ppgtt()
75 ppgtt->vm.total = round_down(U64_MAX, PAGE_SIZE); in mock_ppgtt()
76 ppgtt->vm in mock_ppgtt()
96 mock_bind_ggtt(struct i915_address_space *vm, struct i915_vm_pt_stash *stash, struct i915_vma *vma, enum i915_cache_level cache_level, u32 flags) mock_bind_ggtt() argument
104 mock_unbind_ggtt(struct i915_address_space *vm, struct i915_vma *vma) mock_unbind_ggtt() argument
[all...]
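mock_gtt.c stubs the address-space operations so GTT code can be exercised without hardware. The sketch below models the same idea in plain C: a table of function pointers (insert_page, clear_range, cleanup) filled with no-op mocks. The toy_* names are invented for the example and do not match the i915 structures:

```c
#include <stdint.h>
#include <stdio.h>

struct toy_addr_space;

/* Toy model of an address-space vfunc table like i915_address_space's. */
struct toy_vm_ops {
	void (*insert_page)(struct toy_addr_space *vm, uint64_t addr, uint64_t offset);
	void (*clear_range)(struct toy_addr_space *vm, uint64_t start, uint64_t length);
	void (*cleanup)(struct toy_addr_space *vm);
};

struct toy_addr_space {
	const struct toy_vm_ops *ops;
	uint64_t total;
};

/* No-op mocks, in the spirit of mock_insert_page()/mock_clear_range():
 * they exist only so code under test can call through the table. */
static void mock_insert_page(struct toy_addr_space *vm, uint64_t addr, uint64_t offset)
{
	(void)vm; (void)addr; (void)offset;
}

static void mock_clear_range(struct toy_addr_space *vm, uint64_t start, uint64_t length)
{
	(void)vm; (void)start; (void)length;
}

static void mock_cleanup(struct toy_addr_space *vm)
{
	(void)vm;
}

static const struct toy_vm_ops mock_ops = {
	.insert_page = mock_insert_page,
	.clear_range = mock_clear_range,
	.cleanup     = mock_cleanup,
};

int main(void)
{
	struct toy_addr_space vm = { .ops = &mock_ops, .total = UINT64_MAX };

	vm.ops->insert_page(&vm, 0x1000, 0x0);
	vm.ops->clear_range(&vm, 0x0, vm.total);
	vm.ops->cleanup(&vm);
	printf("mock address space exercised\n");
	return 0;
}
```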
/kernel/linux/linux-5.10/tools/testing/selftests/kvm/lib/s390x/
processor.c
18 void virt_pgd_alloc(struct kvm_vm *vm, uint32_t memslot) in virt_pgd_alloc() argument
22 TEST_ASSERT(vm->page_size == 4096, "Unsupported page size: 0x%x", in virt_pgd_alloc()
23 vm->page_size); in virt_pgd_alloc()
25 if (vm->pgd_created) in virt_pgd_alloc()
28 paddr = vm_phy_pages_alloc(vm, PAGES_PER_REGION, in virt_pgd_alloc()
30 memset(addr_gpa2hva(vm, paddr), 0xff, PAGES_PER_REGION * vm->page_size); in virt_pgd_alloc()
32 vm->pgd = paddr; in virt_pgd_alloc()
33 vm->pgd_created = true; in virt_pgd_alloc()
41 static uint64_t virt_alloc_region(struct kvm_vm *vm, in argument
54 virt_pg_map(struct kvm_vm *vm, uint64_t gva, uint64_t gpa, uint32_t memslot) virt_pg_map() argument
94 addr_gva2gpa(struct kvm_vm *vm, vm_vaddr_t gva) addr_gva2gpa() argument
119 virt_dump_ptes(FILE *stream, struct kvm_vm *vm, uint8_t indent, uint64_t ptea_start) virt_dump_ptes() argument
133 virt_dump_region(FILE *stream, struct kvm_vm *vm, uint8_t indent, uint64_t reg_tab_addr) virt_dump_region() argument
155 virt_dump(FILE *stream, struct kvm_vm *vm, uint8_t indent) virt_dump() argument
172 struct kvm_vm *vm; vm_create_default() local
183 vm_vcpu_add_default(struct kvm_vm *vm, uint32_t vcpuid, void *guest_code) vm_vcpu_add_default() argument
214 vcpu_args_set(struct kvm_vm *vm, uint32_t vcpuid, unsigned int num, ...) vcpu_args_set() argument
234 vcpu_dump(FILE *stream, struct kvm_vm *vm, uint32_t vcpuid, uint8_t indent) vcpu_dump() argument
245 assert_on_unhandled_exception(struct kvm_vm *vm, uint32_t vcpuid) assert_on_unhandled_exception() argument
[all...]
/kernel/linux/linux-6.6/tools/testing/selftests/kvm/lib/s390x/
processor.c
13 void virt_arch_pgd_alloc(struct kvm_vm *vm) in virt_arch_pgd_alloc() argument
17 TEST_ASSERT(vm->page_size == 4096, "Unsupported page size: 0x%x", in virt_arch_pgd_alloc()
18 vm->page_size); in virt_arch_pgd_alloc()
20 if (vm->pgd_created) in virt_arch_pgd_alloc()
23 paddr = vm_phy_pages_alloc(vm, PAGES_PER_REGION, in virt_arch_pgd_alloc()
25 vm->memslots[MEM_REGION_PT]); in virt_arch_pgd_alloc()
26 memset(addr_gpa2hva(vm, paddr), 0xff, PAGES_PER_REGION * vm->page_size); in virt_arch_pgd_alloc()
28 vm->pgd = paddr; in virt_arch_pgd_alloc()
29 vm in virt_arch_pgd_alloc()
37 virt_alloc_region(struct kvm_vm *vm, int ri) virt_alloc_region() argument
50 virt_arch_pg_map(struct kvm_vm *vm, uint64_t gva, uint64_t gpa) virt_arch_pg_map() argument
89 addr_arch_gva2gpa(struct kvm_vm *vm, vm_vaddr_t gva) addr_arch_gva2gpa() argument
114 virt_dump_ptes(FILE *stream, struct kvm_vm *vm, uint8_t indent, uint64_t ptea_start) virt_dump_ptes() argument
128 virt_dump_region(FILE *stream, struct kvm_vm *vm, uint8_t indent, uint64_t reg_tab_addr) virt_dump_region() argument
150 virt_arch_dump(FILE *stream, struct kvm_vm *vm, uint8_t indent) virt_arch_dump() argument
158 vm_arch_vcpu_add(struct kvm_vm *vm, uint32_t vcpu_id, void *guest_code) vm_arch_vcpu_add() argument
[all...]
/kernel/linux/linux-6.6/drivers/gpu/drm/i915/gt/
intel_ggtt.c
52 struct drm_i915_private *i915 = ggtt->vm.i915; in ggtt_init_hw()
54 i915_address_space_init(&ggtt->vm, VM_CLASS_GGTT); in ggtt_init_hw()
56 ggtt->vm.is_ggtt = true; in ggtt_init_hw()
59 ggtt->vm.has_read_only = IS_VALLEYVIEW(i915); in ggtt_init_hw()
62 ggtt->vm.mm.color_adjust = i915_ggtt_color_adjust; in ggtt_init_hw()
68 ggtt->vm.cleanup(&ggtt->vm); in ggtt_init_hw()
104 * @vm: The VM to suspend the mappings for
109 void i915_ggtt_suspend_vm(struct i915_address_space *vm) in i915_ggtt_suspend_vm() argument
114 drm_WARN_ON(&vm in i915_ggtt_suspend_vm()
279 gen8_ggtt_insert_page(struct i915_address_space *vm, dma_addr_t addr, u64 offset, unsigned int pat_index, u32 flags) gen8_ggtt_insert_page() argument
294 gen8_ggtt_insert_entries(struct i915_address_space *vm, struct i915_vma_resource *vma_res, unsigned int pat_index, u32 flags) gen8_ggtt_insert_entries() argument
333 gen8_ggtt_clear_range(struct i915_address_space *vm, u64 start, u64 length) gen8_ggtt_clear_range() argument
354 gen6_ggtt_insert_page(struct i915_address_space *vm, dma_addr_t addr, u64 offset, unsigned int pat_index, u32 flags) gen6_ggtt_insert_page() argument
375 gen6_ggtt_insert_entries(struct i915_address_space *vm, struct i915_vma_resource *vma_res, unsigned int pat_index, u32 flags) gen6_ggtt_insert_entries() argument
408 nop_clear_range(struct i915_address_space *vm, u64 start, u64 length) nop_clear_range() argument
413 bxt_vtd_ggtt_wa(struct i915_address_space *vm) bxt_vtd_ggtt_wa() argument
426 struct i915_address_space *vm; global() member
443 bxt_vtd_ggtt_insert_page__BKL(struct i915_address_space *vm, dma_addr_t addr, u64 offset, unsigned int pat_index, u32 unused) bxt_vtd_ggtt_insert_page__BKL() argument
455 struct i915_address_space *vm; global() member
472 bxt_vtd_ggtt_insert_entries__BKL(struct i915_address_space *vm, struct i915_vma_resource *vma_res, unsigned int pat_index, u32 flags) bxt_vtd_ggtt_insert_entries__BKL() argument
482 gen6_ggtt_clear_range(struct i915_address_space *vm, u64 start, u64 length) gen6_ggtt_clear_range() argument
503 intel_ggtt_bind_vma(struct i915_address_space *vm, struct i915_vm_pt_stash *stash, struct i915_vma_resource *vma_res, unsigned int pat_index, u32 flags) intel_ggtt_bind_vma() argument
527 intel_ggtt_unbind_vma(struct i915_address_space *vm, struct i915_vma_resource *vma_res) intel_ggtt_unbind_vma() argument
680 aliasing_gtt_bind_vma(struct i915_address_space *vm, struct i915_vm_pt_stash *stash, struct i915_vma_resource *vma_res, unsigned int pat_index, u32 flags) aliasing_gtt_bind_vma() argument
703 aliasing_gtt_unbind_vma(struct i915_address_space *vm, struct i915_vma_resource *vma_res) aliasing_gtt_unbind_vma() argument
956 gen6_gmch_remove(struct i915_address_space *vm) gen6_gmch_remove() argument
1301 i915_ggtt_resume_vm(struct i915_address_space *vm) i915_ggtt_resume_vm() argument
[all...]
intel_gtt.c
35 struct drm_i915_gem_object *alloc_pt_lmem(struct i915_address_space *vm, int sz) in alloc_pt_lmem() argument
51 obj = __i915_gem_object_create_lmem_with_ps(vm->i915, sz, sz, in alloc_pt_lmem()
52 vm->lmem_pt_obj_flags); in alloc_pt_lmem()
54 * Ensure all paging structures for this vm share the same dma-resv in alloc_pt_lmem()
59 obj->base.resv = i915_vm_resv_get(vm); in alloc_pt_lmem()
60 obj->shares_resv_from = vm; in alloc_pt_lmem()
66 struct drm_i915_gem_object *alloc_pt_dma(struct i915_address_space *vm, int sz) in alloc_pt_dma() argument
70 if (I915_SELFTEST_ONLY(should_fail(&vm->fault_attr, 1))) in alloc_pt_dma()
71 i915_gem_shrink_all(vm->i915); in alloc_pt_dma()
73 obj = i915_gem_object_create_internal(vm in alloc_pt_dma()
87 map_pt_dma(struct i915_address_space *vm, struct drm_i915_gem_object *obj) map_pt_dma() argument
101 map_pt_dma_locked(struct i915_address_space *vm, struct drm_i915_gem_object *obj) map_pt_dma_locked() argument
151 __i915_vm_close(struct i915_address_space *vm) __i915_vm_close() argument
166 i915_vm_lock_objects(struct i915_address_space *vm, struct i915_gem_ww_ctx *ww) i915_vm_lock_objects() argument
179 i915_address_space_fini(struct i915_address_space *vm) i915_address_space_fini() argument
194 struct i915_address_space *vm = i915_vm_resv_release() local
205 struct i915_address_space *vm = __i915_vm_release() local
221 struct i915_address_space *vm = i915_vm_release() local
230 i915_address_space_init(struct i915_address_space *vm, int subclass) i915_address_space_init() argument
328 setup_scratch_page(struct i915_address_space *vm) setup_scratch_page() argument
392 free_scratch(struct i915_address_space *vm) free_scratch() argument
664 __vm_create_scratch_for_read(struct i915_address_space *vm, unsigned long size) __vm_create_scratch_for_read() argument
685 __vm_create_scratch_for_read_pinned(struct i915_address_space *vm, unsigned long size) __vm_create_scratch_for_read_pinned() argument
[all...]
/kernel/linux/linux-5.10/tools/testing/selftests/kvm/include/
kvm_util.h
65 int vm_enable_cap(struct kvm_vm *vm, struct kvm_enable_cap *cap);
66 int vcpu_enable_cap(struct kvm_vm *vm, uint32_t vcpu_id,
68 void vm_enable_dirty_ring(struct kvm_vm *vm, uint32_t ring_size);
74 void kvm_vm_get_dirty_log(struct kvm_vm *vm, int slot, void *log);
75 void kvm_vm_clear_dirty_log(struct kvm_vm *vm, int slot, void *log,
78 int kvm_memcmp_hva_gva(void *hva, struct kvm_vm *vm, const vm_vaddr_t gva,
81 void kvm_vm_elf_load(struct kvm_vm *vm, const char *filename,
84 void vm_dump(FILE *stream, struct kvm_vm *vm, uint8_t indent);
91 * vm - Virtual Machine
100 * given by @vm, t
[all...]
/kernel/linux/linux-5.10/sound/pci/ctxfi/
ctvmem.c
26 * Find or create vm block based on requested @size.
30 get_vm_block(struct ct_vm *vm, unsigned int size, struct ct_atc *atc) in get_vm_block() argument
36 if (size > vm->size) { in get_vm_block()
42 mutex_lock(&vm->lock); in get_vm_block()
43 list_for_each(pos, &vm->unused) { in get_vm_block()
48 if (pos == &vm->unused) in get_vm_block()
52 /* Move the vm node from unused list to used list directly */ in get_vm_block()
53 list_move(&entry->list, &vm->used); in get_vm_block()
54 vm->size -= size; in get_vm_block()
65 list_add(&block->list, &vm in get_vm_block()
75 put_vm_block(struct ct_vm *vm, struct ct_vm_block *block) put_vm_block() argument
124 ct_vm_map(struct ct_vm *vm, struct snd_pcm_substream *substream, int size) ct_vm_map() argument
152 ct_vm_unmap(struct ct_vm *vm, struct ct_vm_block *block) ct_vm_unmap() argument
164 ct_get_ptp_phys(struct ct_vm *vm, int index) ct_get_ptp_phys() argument
171 struct ct_vm *vm; ct_vm_create() local
215 ct_vm_destroy(struct ct_vm *vm) ct_vm_destroy() argument
[all...]
/kernel/linux/linux-6.6/sound/pci/ctxfi/
ctvmem.c
26 * Find or create vm block based on requested @size.
30 get_vm_block(struct ct_vm *vm, unsigned int size, struct ct_atc *atc) in get_vm_block() argument
36 if (size > vm->size) { in get_vm_block()
42 mutex_lock(&vm->lock); in get_vm_block()
43 list_for_each(pos, &vm->unused) { in get_vm_block()
48 if (pos == &vm->unused) in get_vm_block()
52 /* Move the vm node from unused list to used list directly */ in get_vm_block()
53 list_move(&entry->list, &vm->used); in get_vm_block()
54 vm->size -= size; in get_vm_block()
65 list_add(&block->list, &vm in get_vm_block()
75 put_vm_block(struct ct_vm *vm, struct ct_vm_block *block) put_vm_block() argument
124 ct_vm_map(struct ct_vm *vm, struct snd_pcm_substream *substream, int size) ct_vm_map() argument
152 ct_vm_unmap(struct ct_vm *vm, struct ct_vm_block *block) ct_vm_unmap() argument
164 ct_get_ptp_phys(struct ct_vm *vm, int index) ct_get_ptp_phys() argument
171 struct ct_vm *vm; ct_vm_create() local
215 ct_vm_destroy(struct ct_vm *vm) ct_vm_destroy() argument
[all...]
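get_vm_block() in both ctvmem.c versions is a first-fit walk of the unused list: take the first block large enough, move it to the used list, and shrink the accounted pool size. A simplified sketch of that flow (no locking, and unlike the driver it never splits an oversized block):

```c
#include <stdio.h>
#include <stddef.h>

/* Simplified model of the ct_vm block lists: first-fit over a singly linked
 * free list, loosely following get_vm_block()/put_vm_block(). */
struct block {
	unsigned int addr;
	unsigned int size;
	struct block *next;
};

static struct block *unused;            /* free blocks */
static struct block *used;              /* allocated blocks */
static unsigned int pool_size = 4096;   /* space still available */

static struct block *get_block(unsigned int size)
{
	struct block **pp;

	if (size > pool_size)
		return NULL;

	for (pp = &unused; *pp; pp = &(*pp)->next) {
		if ((*pp)->size >= size) {
			struct block *b = *pp;

			*pp = b->next;   /* unlink from the unused list */
			b->next = used;  /* push onto the used list */
			used = b;
			pool_size -= size;
			return b;
		}
	}
	return NULL;
}

int main(void)
{
	static struct block all = { .addr = 0, .size = 4096, .next = NULL };
	struct block *b;

	unused = &all;
	b = get_block(1024);
	printf("got block at %u, pool left %u\n", b ? b->addr : 0, pool_size);
	return 0;
}
```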
/kernel/linux/linux-6.6/drivers/gpu/drm/i915/selftests/
mock_gtt.c
27 static void mock_insert_page(struct i915_address_space *vm, in mock_insert_page() argument
35 static void mock_insert_entries(struct i915_address_space *vm, in mock_insert_entries() argument
41 static void mock_bind_ppgtt(struct i915_address_space *vm, in mock_bind_ppgtt() argument
51 static void mock_unbind_ppgtt(struct i915_address_space *vm, in mock_unbind_ppgtt() argument
56 static void mock_cleanup(struct i915_address_space *vm) in mock_cleanup() argument
60 static void mock_clear_range(struct i915_address_space *vm, in mock_clear_range() argument
73 ppgtt->vm.gt = to_gt(i915); in mock_ppgtt()
74 ppgtt->vm.i915 = i915; in mock_ppgtt()
75 ppgtt->vm.total = round_down(U64_MAX, PAGE_SIZE); in mock_ppgtt()
76 ppgtt->vm in mock_ppgtt()
94 mock_bind_ggtt(struct i915_address_space *vm, struct i915_vm_pt_stash *stash, struct i915_vma_resource *vma_res, unsigned int pat_index, u32 flags) mock_bind_ggtt() argument
102 mock_unbind_ggtt(struct i915_address_space *vm, struct i915_vma_resource *vma_res) mock_unbind_ggtt() argument
[all...]
/kernel/linux/linux-5.10/drivers/gpu/drm/i915/gt/
intel_ggtt.c
44 struct drm_i915_private *i915 = ggtt->vm.i915; in ggtt_init_hw()
46 i915_address_space_init(&ggtt->vm, VM_CLASS_GGTT); in ggtt_init_hw()
48 ggtt->vm.is_ggtt = true; in ggtt_init_hw()
51 ggtt->vm.has_read_only = IS_VALLEYVIEW(i915); in ggtt_init_hw()
54 ggtt->vm.mm.color_adjust = i915_ggtt_color_adjust; in ggtt_init_hw()
60 ggtt->vm.cleanup(&ggtt->vm); in ggtt_init_hw()
112 mutex_lock(&ggtt->vm.mutex); in i915_ggtt_suspend()
115 open = atomic_xchg(&ggtt->vm.open, 0); in i915_ggtt_suspend()
117 list_for_each_entry_safe(vma, vn, &ggtt->vm in i915_ggtt_suspend()
191 gen8_ggtt_insert_page(struct i915_address_space *vm, dma_addr_t addr, u64 offset, enum i915_cache_level level, u32 unused) gen8_ggtt_insert_page() argument
206 gen8_ggtt_insert_entries(struct i915_address_space *vm, struct i915_vma *vma, enum i915_cache_level level, u32 flags) gen8_ggtt_insert_entries() argument
242 gen6_ggtt_insert_page(struct i915_address_space *vm, dma_addr_t addr, u64 offset, enum i915_cache_level level, u32 flags) gen6_ggtt_insert_page() argument
263 gen6_ggtt_insert_entries(struct i915_address_space *vm, struct i915_vma *vma, enum i915_cache_level level, u32 flags) gen6_ggtt_insert_entries() argument
293 nop_clear_range(struct i915_address_space *vm, u64 start, u64 length) nop_clear_range() argument
298 gen8_ggtt_clear_range(struct i915_address_space *vm, u64 start, u64 length) gen8_ggtt_clear_range() argument
319 bxt_vtd_ggtt_wa(struct i915_address_space *vm) bxt_vtd_ggtt_wa() argument
332 struct i915_address_space *vm; global() member
348 bxt_vtd_ggtt_insert_page__BKL(struct i915_address_space *vm, dma_addr_t addr, u64 offset, enum i915_cache_level level, u32 unused) bxt_vtd_ggtt_insert_page__BKL() argument
360 struct i915_address_space *vm; global() member
376 bxt_vtd_ggtt_insert_entries__BKL(struct i915_address_space *vm, struct i915_vma *vma, enum i915_cache_level level, u32 flags) bxt_vtd_ggtt_insert_entries__BKL() argument
386 gen6_ggtt_clear_range(struct i915_address_space *vm, u64 start, u64 length) gen6_ggtt_clear_range() argument
407 i915_ggtt_insert_page(struct i915_address_space *vm, dma_addr_t addr, u64 offset, enum i915_cache_level cache_level, u32 unused) i915_ggtt_insert_page() argument
419 i915_ggtt_insert_entries(struct i915_address_space *vm, struct i915_vma *vma, enum i915_cache_level cache_level, u32 unused) i915_ggtt_insert_entries() argument
431 i915_ggtt_clear_range(struct i915_address_space *vm, u64 start, u64 length) i915_ggtt_clear_range() argument
437 ggtt_bind_vma(struct i915_address_space *vm, struct i915_vm_pt_stash *stash, struct i915_vma *vma, enum i915_cache_level cache_level, u32 flags) ggtt_bind_vma() argument
458 ggtt_unbind_vma(struct i915_address_space *vm, struct i915_vma *vma) ggtt_unbind_vma() argument
591 aliasing_gtt_bind_vma(struct i915_address_space *vm, struct i915_vm_pt_stash *stash, struct i915_vma *vma, enum i915_cache_level cache_level, u32 flags) aliasing_gtt_bind_vma() argument
612 aliasing_gtt_unbind_vma(struct i915_address_space *vm, struct i915_vma *vma) aliasing_gtt_unbind_vma() argument
839 gen6_gmch_remove(struct i915_address_space *vm) gen6_gmch_remove() argument
1052 i915_gmch_remove(struct i915_address_space *vm) i915_gmch_remove() argument
[all...]
/kernel/linux/linux-6.6/drivers/virt/acrn/
vm.c
25 struct acrn_vm *acrn_vm_create(struct acrn_vm *vm, in acrn_vm_create() argument
37 mutex_init(&vm->regions_mapping_lock); in acrn_vm_create()
38 INIT_LIST_HEAD(&vm->ioreq_clients); in acrn_vm_create()
39 spin_lock_init(&vm->ioreq_clients_lock); in acrn_vm_create()
40 vm->vmid = vm_param->vmid; in acrn_vm_create()
41 vm->vcpu_num = vm_param->vcpu_num; in acrn_vm_create()
43 if (acrn_ioreq_init(vm, vm_param->ioreq_buf) < 0) { in acrn_vm_create()
45 vm->vmid = ACRN_INVALID_VMID; in acrn_vm_create()
50 list_add(&vm->list, &acrn_vm_list); in acrn_vm_create()
53 acrn_ioeventfd_init(vm); in acrn_vm_create()
59 acrn_vm_destroy(struct acrn_vm *vm) acrn_vm_destroy() argument
104 acrn_msi_inject(struct acrn_vm *vm, u64 msi_addr, u64 msi_data) acrn_msi_inject() argument
[all...]
ioeventfd.c
43 static void acrn_ioeventfd_shutdown(struct acrn_vm *vm, struct hsm_ioeventfd *p) in acrn_ioeventfd_shutdown() argument
45 lockdep_assert_held(&vm->ioeventfds_lock); in acrn_ioeventfd_shutdown()
52 static bool hsm_ioeventfd_is_conflict(struct acrn_vm *vm, in hsm_ioeventfd_is_conflict() argument
57 lockdep_assert_held(&vm->ioeventfds_lock); in hsm_ioeventfd_is_conflict()
60 list_for_each_entry(p, &vm->ioeventfds, list) in hsm_ioeventfd_is_conflict()
76 static int acrn_ioeventfd_assign(struct acrn_vm *vm, in acrn_ioeventfd_assign() argument
121 mutex_lock(&vm->ioeventfds_lock); in acrn_ioeventfd_assign()
123 if (hsm_ioeventfd_is_conflict(vm, p)) { in acrn_ioeventfd_assign()
129 ret = acrn_ioreq_range_add(vm->ioeventfd_client, p->type, in acrn_ioeventfd_assign()
134 list_add_tail(&p->list, &vm in acrn_ioeventfd_assign()
147 acrn_ioeventfd_deassign(struct acrn_vm *vm, struct acrn_ioeventfd *args) acrn_ioeventfd_deassign() argument
173 hsm_ioeventfd_match(struct acrn_vm *vm, u64 addr, u64 data, int len, int type) hsm_ioeventfd_match() argument
232 acrn_ioeventfd_config(struct acrn_vm *vm, struct acrn_ioeventfd *args) acrn_ioeventfd_config() argument
244 acrn_ioeventfd_init(struct acrn_vm *vm) acrn_ioeventfd_init() argument
263 acrn_ioeventfd_deinit(struct acrn_vm *vm) acrn_ioeventfd_deinit() argument
[all...]
irqfd.c
23 * @vm: Associated VM pointer
32 struct acrn_vm *vm; member
43 struct acrn_vm *vm = irqfd->vm; in acrn_irqfd_inject() local
45 acrn_msi_inject(vm, irqfd->msi.msi_addr, in acrn_irqfd_inject()
53 lockdep_assert_held(&irqfd->vm->irqfds_lock); in hsm_irqfd_shutdown()
65 struct acrn_vm *vm; in hsm_irqfd_shutdown_work() local
68 vm = irqfd->vm; in hsm_irqfd_shutdown_work()
69 mutex_lock(&vm in hsm_irqfd_shutdown_work()
81 struct acrn_vm *vm; hsm_irqfd_wakeup() local
110 acrn_irqfd_assign(struct acrn_vm *vm, struct acrn_irqfd *args) acrn_irqfd_assign() argument
177 acrn_irqfd_deassign(struct acrn_vm *vm, struct acrn_irqfd *args) acrn_irqfd_deassign() argument
200 acrn_irqfd_config(struct acrn_vm *vm, struct acrn_irqfd *args) acrn_irqfd_config() argument
212 acrn_irqfd_init(struct acrn_vm *vm) acrn_irqfd_init() argument
224 acrn_irqfd_deinit(struct acrn_vm *vm) acrn_irqfd_deinit() argument
[all...]
H A Dioreq.c39 static int ioreq_complete_request(struct acrn_vm *vm, u16 vcpu, in ioreq_complete_request() argument
64 ret = hcall_notify_req_finish(vm->vmid, vcpu); in ioreq_complete_request()
79 if (vcpu >= client->vm->vcpu_num) in acrn_ioreq_complete_request()
84 acrn_req = (struct acrn_io_request *)client->vm->ioreq_buf; in acrn_ioreq_complete_request()
88 ret = ioreq_complete_request(client->vm, vcpu, acrn_req); in acrn_ioreq_complete_request()
93 int acrn_ioreq_request_default_complete(struct acrn_vm *vm, u16 vcpu) in acrn_ioreq_request_default_complete() argument
97 spin_lock_bh(&vm->ioreq_clients_lock); in acrn_ioreq_request_default_complete()
98 if (vm->default_client) in acrn_ioreq_request_default_complete()
99 ret = acrn_ioreq_complete_request(vm->default_client, in acrn_ioreq_request_default_complete()
101 spin_unlock_bh(&vm->ioreq_clients_lock); in acrn_ioreq_request_default_complete()
212 acrn_ioreq_request_clear(struct acrn_vm *vm) acrn_ioreq_request_clear() argument
312 handle_cf8cfc(struct acrn_vm *vm, struct acrn_io_request *req, u16 vcpu) handle_cf8cfc() argument
381 find_ioreq_client(struct acrn_vm *vm, struct acrn_io_request *req) find_ioreq_client() argument
415 acrn_ioreq_client_create(struct acrn_vm *vm, ioreq_handler_t handler, void *priv, bool is_default, const char *name) acrn_ioreq_client_create() argument
468 struct acrn_vm *vm = client->vm; acrn_ioreq_client_destroy() local
497 acrn_ioreq_dispatch(struct acrn_vm *vm) acrn_ioreq_dispatch() argument
546 struct acrn_vm *vm; ioreq_dispatcher() local
596 acrn_ioreq_init(struct acrn_vm *vm, u64 buf_vma) acrn_ioreq_init() argument
636 acrn_ioreq_deinit(struct acrn_vm *vm) acrn_ioreq_deinit() argument
[all...]
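
The ioreq.c hits outline how a completed request reaches the hypervisor: the vCPU index is bounds-checked against vm->vcpu_num, the request is read from the shared ioreq buffer, and hcall_notify_req_finish(vm->vmid, vcpu) is issued; the default-complete path additionally takes the ioreq-clients spinlock and only dispatches if a default client is registered. Below is a small user-space sketch of that bounds-check plus lock-and-null-check dispatch; all names in it are illustrative stand-ins.

/* User-space sketch of the dispatch shape in
 * acrn_ioreq_request_default_complete(): bounds-check the vCPU index,
 * then call into the default client only while the per-VM client lock
 * is held and the client actually exists. All names are stand-ins. */
#include <pthread.h>
#include <stdint.h>
#include <stdio.h>

struct sketch_client {
	const char *name;
};

struct sketch_vm {
	uint16_t vcpu_num;
	pthread_mutex_t ioreq_clients_lock;	/* spin_lock_bh() stand-in */
	struct sketch_client *default_client;	/* may be NULL */
};

static int sketch_complete_request(struct sketch_client *c, uint16_t vcpu)
{
	/* The real path reads the request from the shared ioreq buffer and
	 * ends in hcall_notify_req_finish(vm->vmid, vcpu). */
	printf("client %s completed request for vcpu %u\n", c->name,
	       (unsigned)vcpu);
	return 0;
}

static int sketch_default_complete(struct sketch_vm *vm, uint16_t vcpu)
{
	int ret = 0;

	if (vcpu >= vm->vcpu_num)	/* same bounds check as the snippet */
		return -1;

	pthread_mutex_lock(&vm->ioreq_clients_lock);
	if (vm->default_client)
		ret = sketch_complete_request(vm->default_client, vcpu);
	pthread_mutex_unlock(&vm->ioreq_clients_lock);
	return ret;
}

int main(void)
{
	struct sketch_client c = { .name = "default" };
	struct sketch_vm vm = {
		.vcpu_num = 2,
		.ioreq_clients_lock = PTHREAD_MUTEX_INITIALIZER,
		.default_client = &c,
	};

	return sketch_default_complete(&vm, 1) ? 1 : 0;
}
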
/kernel/linux/linux-6.6/tools/testing/selftests/kvm/lib/
H A Dkvm_util.c128 void vm_enable_dirty_ring(struct kvm_vm *vm, uint32_t ring_size) in vm_enable_dirty_ring() argument
130 if (vm_check_cap(vm, KVM_CAP_DIRTY_LOG_RING_ACQ_REL)) in vm_enable_dirty_ring()
131 vm_enable_cap(vm, KVM_CAP_DIRTY_LOG_RING_ACQ_REL, ring_size); in vm_enable_dirty_ring()
133 vm_enable_cap(vm, KVM_CAP_DIRTY_LOG_RING, ring_size); in vm_enable_dirty_ring()
134 vm->dirty_ring_size = ring_size; in vm_enable_dirty_ring()
137 static void vm_open(struct kvm_vm *vm) in vm_open() argument
139 vm->kvm_fd = _open_kvm_dev_path_or_exit(O_RDWR); in vm_open()
143 vm->fd = __kvm_ioctl(vm->kvm_fd, KVM_CREATE_VM, (void *)vm->type); in vm_open()
203 vm_vaddr_populate_bitmap(struct kvm_vm *vm) vm_vaddr_populate_bitmap() argument
214 struct kvm_vm *vm; ____vm_create() local
356 struct kvm_vm *vm; __vm_create() local
407 struct kvm_vm *vm; __vm_create_with_vcpus() local
425 struct kvm_vm *vm; __vm_create_with_one_vcpu() local
468 vm_arch_vcpu_recreate(struct kvm_vm *vm, uint32_t vcpu_id) vm_arch_vcpu_recreate() argument
474 vm_recreate_with_one_vcpu(struct kvm_vm *vm) vm_recreate_with_one_vcpu() argument
571 userspace_mem_region_find(struct kvm_vm *vm, uint64_t start, uint64_t end) userspace_mem_region_find() argument
610 kvm_userspace_memory_region_find(struct kvm_vm *vm, uint64_t start, uint64_t end) kvm_userspace_memory_region_find() argument
639 vm_vcpu_rm(struct kvm_vm *vm, struct kvm_vcpu *vcpu) vm_vcpu_rm() argument
676 __vm_mem_region_delete(struct kvm_vm *vm, struct userspace_mem_region *region, bool unlink) __vm_mem_region_delete() argument
779 kvm_memcmp_hva_gva(void *hva, struct kvm_vm *vm, vm_vaddr_t gva, size_t len) kvm_memcmp_hva_gva() argument
878 __vm_set_user_memory_region(struct kvm_vm *vm, uint32_t slot, uint32_t flags, uint64_t gpa, uint64_t size, void *hva) __vm_set_user_memory_region() argument
892 vm_set_user_memory_region(struct kvm_vm *vm, uint32_t slot, uint32_t flags, uint64_t gpa, uint64_t size, void *hva) vm_set_user_memory_region() argument
923 vm_userspace_mem_region_add(struct kvm_vm *vm, enum vm_mem_backing_src_type src_type, uint64_t guest_paddr, uint32_t slot, uint64_t npages, uint32_t flags) vm_userspace_mem_region_add() argument
1089 memslot2region(struct kvm_vm *vm, uint32_t memslot) memslot2region() argument
1120 vm_mem_region_set_flags(struct kvm_vm *vm, uint32_t slot, uint32_t flags) vm_mem_region_set_flags() argument
1150 vm_mem_region_move(struct kvm_vm *vm, uint32_t slot, uint64_t new_gpa) vm_mem_region_move() argument
1179 vm_mem_region_delete(struct kvm_vm *vm, uint32_t slot) vm_mem_region_delete() argument
1200 vcpu_exists(struct kvm_vm *vm, uint32_t vcpu_id) vcpu_exists() argument
1216 __vm_vcpu_add(struct kvm_vm *vm, uint32_t vcpu_id) __vm_vcpu_add() argument
1266 vm_vaddr_unused_gap(struct kvm_vm *vm, size_t sz, vm_vaddr_t vaddr_min) vm_vaddr_unused_gap() argument
1332 __vm_vaddr_alloc(struct kvm_vm *vm, size_t sz, vm_vaddr_t vaddr_min, enum kvm_mem_region_type type) __vm_vaddr_alloc() argument
1379 vm_vaddr_alloc(struct kvm_vm *vm, size_t sz, vm_vaddr_t vaddr_min) vm_vaddr_alloc() argument
1398 vm_vaddr_alloc_pages(struct kvm_vm *vm, int nr_pages) vm_vaddr_alloc_pages() argument
1403 __vm_vaddr_alloc_page(struct kvm_vm *vm, enum kvm_mem_region_type type) __vm_vaddr_alloc_page() argument
1422 vm_vaddr_alloc_page(struct kvm_vm *vm) vm_vaddr_alloc_page() argument
1443 virt_map(struct kvm_vm *vm, uint64_t vaddr, uint64_t paddr, unsigned int npages) virt_map() argument
1478 addr_gpa2hva(struct kvm_vm *vm, vm_paddr_t gpa) addr_gpa2hva() argument
1509 addr_hva2gpa(struct kvm_vm *vm, void *hva) addr_hva2gpa() argument
1552 addr_gpa2alias(struct kvm_vm *vm, vm_paddr_t gpa) addr_gpa2alias() argument
1569 vm_create_irqchip(struct kvm_vm *vm) vm_create_irqchip() argument
1676 __kvm_test_create_device(struct kvm_vm *vm, uint64_t type) __kvm_test_create_device() argument
1686 __kvm_create_device(struct kvm_vm *vm, uint64_t type) __kvm_create_device() argument
1728 _kvm_irq_line(struct kvm_vm *vm, uint32_t irq, int level) _kvm_irq_line() argument
1738 kvm_irq_line(struct kvm_vm *vm, uint32_t irq, int level) kvm_irq_line() argument
1776 _kvm_gsi_routing_write(struct kvm_vm *vm, struct kvm_irq_routing *routing) _kvm_gsi_routing_write() argument
1787 kvm_gsi_routing_write(struct kvm_vm *vm, struct kvm_irq_routing *routing) kvm_gsi_routing_write() argument
1810 vm_dump(FILE *stream, struct kvm_vm *vm, uint8_t indent) vm_dump() argument
1940 vm_phy_pages_alloc(struct kvm_vm *vm, size_t num, vm_paddr_t paddr_min, uint32_t memslot) vm_phy_pages_alloc() argument
1980 vm_phy_page_alloc(struct kvm_vm *vm, vm_paddr_t paddr_min, uint32_t memslot) vm_phy_page_alloc() argument
1986 vm_alloc_page_table(struct kvm_vm *vm) vm_alloc_page_table() argument
2004 addr_gva2hva(struct kvm_vm *vm, vm_vaddr_t gva) addr_gva2hva() argument
2009 vm_compute_max_gfn(struct kvm_vm *vm) vm_compute_max_gfn() argument
2135 __vm_get_stat(struct kvm_vm *vm, const char *stat_name, uint64_t *data, size_t max_elements) __vm_get_stat() argument
2164 kvm_arch_vm_post_create(struct kvm_vm *vm) kvm_arch_vm_post_create() argument
[all...]
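
The vm_enable_dirty_ring() hit is a compact example of a capability-with-fallback pattern in the KVM selftest library: prefer KVM_CAP_DIRTY_LOG_RING_ACQ_REL when the kernel advertises it, otherwise fall back to KVM_CAP_DIRTY_LOG_RING, and record the configured ring size in the VM. A generic user-space rendering of that decision is sketched below; the check/enable helpers and the cap enum are hypothetical stand-ins for the selftest API.

/* Generic "prefer the newer capability, fall back to the older one"
 * pattern, as in the vm_enable_dirty_ring() snippet. The cap IDs and
 * the check/enable helpers are illustrative stand-ins only. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

enum sketch_cap {
	SKETCH_CAP_DIRTY_RING,		/* legacy ring */
	SKETCH_CAP_DIRTY_RING_ACQ_REL,	/* acquire/release flavour */
};

struct sketch_kvm_vm {
	uint32_t dirty_ring_size;
};

static bool sketch_check_cap(enum sketch_cap cap)
{
	/* Pretend only the newer capability is advertised. */
	return cap == SKETCH_CAP_DIRTY_RING_ACQ_REL;
}

static void sketch_enable_cap(struct sketch_kvm_vm *vm, enum sketch_cap cap,
			      uint32_t ring_size)
{
	(void)vm;
	printf("enabling cap %d with ring size %u\n", (int)cap, ring_size);
}

static void sketch_enable_dirty_ring(struct sketch_kvm_vm *vm,
				     uint32_t ring_size)
{
	if (sketch_check_cap(SKETCH_CAP_DIRTY_RING_ACQ_REL))
		sketch_enable_cap(vm, SKETCH_CAP_DIRTY_RING_ACQ_REL, ring_size);
	else
		sketch_enable_cap(vm, SKETCH_CAP_DIRTY_RING, ring_size);
	vm->dirty_ring_size = ring_size;	/* remembered, as in the snippet */
}

int main(void)
{
	struct sketch_kvm_vm vm = { 0 };

	sketch_enable_dirty_ring(&vm, 4096);
	return vm.dirty_ring_size == 4096 ? 0 : 1;
}
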
/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/
H A Damdgpu_vm.c88 * vm eviction_lock can be taken in MMU notifiers. Make sure no reclaim-FS
92 static inline void amdgpu_vm_eviction_lock(struct amdgpu_vm *vm) in amdgpu_vm_eviction_lock() argument
94 mutex_lock(&vm->eviction_lock); in amdgpu_vm_eviction_lock()
95 vm->saved_flags = memalloc_nofs_save(); in amdgpu_vm_eviction_lock()
98 static inline int amdgpu_vm_eviction_trylock(struct amdgpu_vm *vm) in amdgpu_vm_eviction_trylock() argument
100 if (mutex_trylock(&vm->eviction_lock)) { in amdgpu_vm_eviction_trylock()
101 vm->saved_flags = memalloc_nofs_save(); in amdgpu_vm_eviction_trylock()
107 static inline void amdgpu_vm_eviction_unlock(struct amdgpu_vm *vm) in amdgpu_vm_eviction_unlock() argument
109 memalloc_nofs_restore(vm->saved_flags); in amdgpu_vm_eviction_unlock()
110 mutex_unlock(&vm->eviction_lock); in amdgpu_vm_eviction_unlock()
225 struct amdgpu_vm *vm = vm_bo->vm; amdgpu_vm_bo_evicted() local
317 amdgpu_vm_bo_base_init(struct amdgpu_vm_bo_base *base, struct amdgpu_vm *vm, struct amdgpu_bo *bo) amdgpu_vm_bo_base_init() argument
390 amdgpu_vm_pt_start(struct amdgpu_device *adev, struct amdgpu_vm *vm, uint64_t start, struct amdgpu_vm_pt_cursor *cursor) amdgpu_vm_pt_start() argument
516 amdgpu_vm_pt_first_dfs(struct amdgpu_device *adev, struct amdgpu_vm *vm, struct amdgpu_vm_pt_cursor *start, struct amdgpu_vm_pt_cursor *cursor) amdgpu_vm_pt_first_dfs() argument
584 amdgpu_vm_get_pd_bo(struct amdgpu_vm *vm, struct list_head *validated, struct amdgpu_bo_list_entry *entry) amdgpu_vm_get_pd_bo() argument
619 struct amdgpu_vm *vm = bo_base->vm; amdgpu_vm_del_from_lru_notify() local
635 amdgpu_vm_move_to_lru_tail(struct amdgpu_device *adev, struct amdgpu_vm *vm) amdgpu_vm_move_to_lru_tail() argument
679 amdgpu_vm_validate_pt_bos(struct amdgpu_device *adev, struct amdgpu_vm *vm, int (*validate)(void *p, struct amdgpu_bo *bo), void *param) amdgpu_vm_validate_pt_bos() argument
720 amdgpu_vm_ready(struct amdgpu_vm *vm) amdgpu_vm_ready() argument
744 amdgpu_vm_clear_bo(struct amdgpu_device *adev, struct amdgpu_vm *vm, struct amdgpu_bo *bo, bool immediate) amdgpu_vm_clear_bo() argument
864 amdgpu_vm_bo_param(struct amdgpu_device *adev, struct amdgpu_vm *vm, int level, bool immediate, struct amdgpu_bo_param *bp) amdgpu_vm_bo_param() argument
900 amdgpu_vm_alloc_pts(struct amdgpu_device *adev, struct amdgpu_vm *vm, struct amdgpu_vm_pt_cursor *cursor, bool immediate) amdgpu_vm_alloc_pts() argument
974 amdgpu_vm_free_pts(struct amdgpu_device *adev, struct amdgpu_vm *vm, struct amdgpu_vm_pt_cursor *start) amdgpu_vm_free_pts() argument
1195 amdgpu_vm_bo_find(struct amdgpu_vm *vm, struct amdgpu_bo *bo) amdgpu_vm_bo_find() argument
1245 amdgpu_vm_update_pde(struct amdgpu_vm_update_params *params, struct amdgpu_vm *vm, struct amdgpu_vm_pt *entry) amdgpu_vm_update_pde() argument
1271 amdgpu_vm_invalidate_pds(struct amdgpu_device *adev, struct amdgpu_vm *vm) amdgpu_vm_invalidate_pds() argument
1294 amdgpu_vm_update_pdes(struct amdgpu_device *adev, struct amdgpu_vm *vm, bool immediate) amdgpu_vm_update_pdes() argument
1524 struct amdgpu_vm *vm = params->vm; amdgpu_vm_update_ptes() local
1596 amdgpu_vm_bo_update_mapping(struct amdgpu_device *adev, struct amdgpu_vm *vm, bool immediate, bool unlocked, struct dma_resv *resv, uint64_t start, uint64_t last, uint64_t flags, uint64_t addr, dma_addr_t *pages_addr, struct dma_fence **fence) amdgpu_vm_bo_update_mapping() argument
1671 amdgpu_vm_bo_split_mapping(struct amdgpu_device *adev, struct dma_resv *resv, dma_addr_t *pages_addr, struct amdgpu_vm *vm, struct amdgpu_bo_va_mapping *mapping, uint64_t flags, struct amdgpu_device *bo_adev, struct drm_mm_node *nodes, struct dma_fence **fence) amdgpu_vm_bo_split_mapping() argument
1782 struct amdgpu_vm *vm = bo_va->base.vm; amdgpu_vm_bo_update() local
1972 amdgpu_vm_free_mapping(struct amdgpu_device *adev, struct amdgpu_vm *vm, struct amdgpu_bo_va_mapping *mapping, struct dma_fence *fence) amdgpu_vm_free_mapping() argument
1990 amdgpu_vm_prt_fini(struct amdgpu_device *adev, struct amdgpu_vm *vm) amdgpu_vm_prt_fini() argument
2035 amdgpu_vm_clear_freed(struct amdgpu_device *adev, struct amdgpu_vm *vm, struct dma_fence **fence) amdgpu_vm_clear_freed() argument
2088 amdgpu_vm_handle_moved(struct amdgpu_device *adev, struct amdgpu_vm *vm) amdgpu_vm_handle_moved() argument
2145 amdgpu_vm_bo_add(struct amdgpu_device *adev, struct amdgpu_vm *vm, struct amdgpu_bo *bo) amdgpu_vm_bo_add() argument
2187 struct amdgpu_vm *vm = bo_va->base.vm; amdgpu_vm_bo_insert_map() local
2259 struct amdgpu_vm *vm = bo_va->base.vm; amdgpu_vm_bo_map() local
2368 struct amdgpu_vm *vm = bo_va->base.vm; amdgpu_vm_bo_unmap() local
2417 amdgpu_vm_bo_clear_mappings(struct amdgpu_device *adev, struct amdgpu_vm *vm, uint64_t saddr, uint64_t size) amdgpu_vm_bo_clear_mappings() argument
2524 amdgpu_vm_bo_lookup_mapping(struct amdgpu_vm *vm, uint64_t addr) amdgpu_vm_bo_lookup_mapping() argument
2538 amdgpu_vm_bo_trace_cs(struct amdgpu_vm *vm, struct ww_acquire_ctx *ticket) amdgpu_vm_bo_trace_cs() argument
2575 struct amdgpu_vm *vm = bo_va->base.vm; amdgpu_vm_bo_rmv() local
2671 struct amdgpu_vm *vm = bo_base->vm; amdgpu_vm_bo_invalidate() local
2812 amdgpu_vm_wait_idle(struct amdgpu_vm *vm, long timeout) amdgpu_vm_wait_idle() argument
2835 amdgpu_vm_init(struct amdgpu_device *adev, struct amdgpu_vm *vm, int vm_context, u32 pasid) amdgpu_vm_init() argument
2967 amdgpu_vm_check_clean_reserved(struct amdgpu_device *adev, struct amdgpu_vm *vm) amdgpu_vm_check_clean_reserved() argument
3006 amdgpu_vm_make_compute(struct amdgpu_device *adev, struct amdgpu_vm *vm, u32 pasid) amdgpu_vm_make_compute() argument
3111 amdgpu_vm_release_compute(struct amdgpu_device *adev, struct amdgpu_vm *vm) amdgpu_vm_release_compute() argument
3133 amdgpu_vm_fini(struct amdgpu_device *adev, struct amdgpu_vm *vm) amdgpu_vm_fini() argument
3322 struct amdgpu_vm *vm; amdgpu_vm_get_task_info() local
3339 amdgpu_vm_set_task_info(struct amdgpu_vm *vm) amdgpu_vm_set_task_info() argument
3368 struct amdgpu_vm *vm; amdgpu_vm_handle_fault() local
[all...]
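
The amdgpu_vm_eviction_lock()/trylock()/unlock() hits document a locking discipline rather than an algorithm: because the eviction lock can be taken from MMU notifiers, FS reclaim must not occur while it is held, so memalloc_nofs_save() is called right after acquiring the mutex, the returned flags are stashed in the VM, and memalloc_nofs_restore() runs immediately before the unlock. A user-space analogue of that save/restore-around-lock shape is sketched below, with an integer standing in for the NOFS flag state.

/* User-space analogue of the save/restore-around-lock discipline in the
 * amdgpu_vm_eviction_lock()/unlock() snippets: the flags saved on entry
 * live in the object the lock protects and are restored immediately
 * before the unlock. The nofs helpers are stand-ins for
 * memalloc_nofs_save()/memalloc_nofs_restore(). */
#include <pthread.h>
#include <stdbool.h>

static unsigned int g_alloc_flags;

static unsigned int sketch_nofs_save(void)
{
	unsigned int old = g_alloc_flags;

	g_alloc_flags |= 1u;	/* "no FS reclaim" bit, purely illustrative */
	return old;
}

static void sketch_nofs_restore(unsigned int old)
{
	g_alloc_flags = old;
}

struct sketch_amdgpu_vm {
	pthread_mutex_t eviction_lock;
	unsigned int saved_flags;
};

static void sketch_eviction_lock(struct sketch_amdgpu_vm *vm)
{
	pthread_mutex_lock(&vm->eviction_lock);
	vm->saved_flags = sketch_nofs_save();
}

static bool sketch_eviction_trylock(struct sketch_amdgpu_vm *vm)
{
	if (pthread_mutex_trylock(&vm->eviction_lock) == 0) {
		vm->saved_flags = sketch_nofs_save();
		return true;
	}
	return false;
}

static void sketch_eviction_unlock(struct sketch_amdgpu_vm *vm)
{
	sketch_nofs_restore(vm->saved_flags);
	pthread_mutex_unlock(&vm->eviction_lock);
}

int main(void)
{
	struct sketch_amdgpu_vm vm = {
		.eviction_lock = PTHREAD_MUTEX_INITIALIZER,
	};

	sketch_eviction_lock(&vm);
	sketch_eviction_unlock(&vm);

	if (!sketch_eviction_trylock(&vm))
		return 1;
	sketch_eviction_unlock(&vm);
	return 0;
}
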
