/third_party/alsa-lib/src/rawmidi/
  rawmidi_virt.c
     62  snd_rawmidi_virtual_t *virt = rmidi->private_data;  /* in snd_rawmidi_virtual_close(), local */
     63  virt->open--;  /* in snd_rawmidi_virtual_close() */
     64  if (virt->open)  /* in snd_rawmidi_virtual_close() */
     66  snd_seq_close(virt->handle);  /* in snd_rawmidi_virtual_close() */
     67  if (virt->midi_event)  /* in snd_rawmidi_virtual_close() */
     68  snd_midi_event_free(virt->midi_event);  /* in snd_rawmidi_virtual_close() */
     69  free(virt);  /* in snd_rawmidi_virtual_close() */
     75  snd_rawmidi_virtual_t *virt = rmidi->private_data;  /* in snd_rawmidi_virtual_nonblock(), local */
     77  return snd_seq_nonblock(virt->handle, nonblock);  /* in snd_rawmidi_virtual_nonblock() */
     82  // snd_rawmidi_virtual_t *virt  /* in snd_rawmidi_virtual_info() */
     98  snd_rawmidi_virtual_input_params(snd_rawmidi_virtual_t *virt, snd_rawmidi_params_t *params)  /* argument */
    118  snd_rawmidi_virtual_output_params(snd_rawmidi_virtual_t *virt, snd_rawmidi_params_t *params)  /* argument */
    139  snd_rawmidi_virtual_t *virt = rmidi->private_data;  /* in snd_rawmidi_virtual_params(), local */
    158  snd_rawmidi_virtual_t *virt = rmidi->private_data;  /* in snd_rawmidi_virtual_drop(), local */
    173  snd_rawmidi_virtual_t *virt = rmidi->private_data;  /* in snd_rawmidi_virtual_drain(), local */
    191  snd_rawmidi_virtual_t *virt = rmidi->private_data;  /* in snd_rawmidi_virtual_write(), local */
    236  snd_rawmidi_virtual_t *virt = rmidi->private_data;  /* in snd_rawmidi_virtual_read(), local */
    317  snd_rawmidi_virtual_t *virt = NULL;  /* in snd_rawmidi_virtual_open(), local */
    [all...]
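The matches above cover the virtual rawmidi backend's close, nonblock, params, read/write, and open paths. For orientation, a minimal client-side usage sketch, assuming alsa-lib with the sequencer available; error handling is trimmed and the note values are arbitrary:

    /* Open the "virtual" rawmidi device (implemented by rawmidi_virt.c),
     * switch the output end to non-blocking, send one note-on, and close.
     * Both closes funnel into snd_rawmidi_virtual_close() above, which
     * drops virt->open and frees the state on the last reference. */
    #include <alsa/asoundlib.h>

    int main(void)
    {
        snd_rawmidi_t *in = NULL, *out = NULL;
        static const unsigned char note_on[3] = { 0x90, 60, 100 };

        if (snd_rawmidi_open(&in, &out, "virtual", 0) < 0)
            return 1;
        snd_rawmidi_nonblock(out, 1);  /* reaches snd_rawmidi_virtual_nonblock() */
        snd_rawmidi_write(out, note_on, sizeof(note_on));
        snd_rawmidi_drain(out);
        snd_rawmidi_close(out);
        snd_rawmidi_close(in);
        return 0;
    }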
/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/
  amdgpu_virt.c
    114  struct amdgpu_virt *virt = &adev->virt;  /* in amdgpu_virt_request_full_gpu(), local */
    117  if (virt->ops && virt->ops->req_full_gpu) {  /* in amdgpu_virt_request_full_gpu() */
    118  r = virt->ops->req_full_gpu(adev, init);  /* in amdgpu_virt_request_full_gpu() */
    122  adev->virt.caps &= ~AMDGPU_SRIOV_CAPS_RUNTIME;  /* in amdgpu_virt_request_full_gpu() */
    137  struct amdgpu_virt *virt = &adev->virt;  /* in amdgpu_virt_release_full_gpu(), local */
    140  if (virt->ops && virt ...  /* in amdgpu_virt_release_full_gpu(), truncated */
    158  struct amdgpu_virt *virt = &adev->virt;  /* in amdgpu_virt_reset_gpu(), local */
    174  struct amdgpu_virt *virt = &adev->virt;  /* in amdgpu_virt_request_init_data(), local */
    193  struct amdgpu_virt *virt = &adev->virt;  /* in amdgpu_virt_wait_reset(), local */
    270  struct amdgpu_virt *virt = &adev->virt;  /* in amdgpu_virt_init_ras_err_handler_data(), local */
    305  struct amdgpu_virt *virt = &adev->virt;  /* in amdgpu_virt_ras_release_bp(), local */
    323  struct amdgpu_virt *virt = &adev->virt;  /* in amdgpu_virt_release_ras_err_handler_data(), local */
    342  struct amdgpu_virt *virt = &adev->virt;  /* in amdgpu_virt_ras_add_bps(), local */
    354  struct amdgpu_virt *virt = &adev->virt;  /* in amdgpu_virt_ras_reserve_bps(), local */
    386  struct amdgpu_virt *virt = &adev->virt;  /* in amdgpu_virt_ras_check_bad_page(), local */
    [all...]
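The pattern visible throughout this file is guarded dispatch: every host request goes through an optional virt->ops table and is only issued when a backend registered a handler. A self-contained sketch of the idiom; the struct and function names here are illustrative stand-ins, not the real amdgpu definitions:

    /* Guarded-dispatch sketch, as in amdgpu_virt_request_full_gpu():
     * call through an optional ops table, and clear the runtime cap
     * once full access is granted.  All names are hypothetical. */
    #include <stdbool.h>

    struct vf_ops {
        int (*req_full_gpu)(void *dev, bool init);
    };

    struct vf_state {
        const struct vf_ops *ops;  /* NULL until a backend registers */
        unsigned int caps;
    };

    #define CAP_RUNTIME 0x1u

    static int vf_request_full_gpu(struct vf_state *vf, void *dev, bool init)
    {
        int r = 0;

        if (vf->ops && vf->ops->req_full_gpu) {
            r = vf->ops->req_full_gpu(dev, init);
            if (!r)
                vf->caps &= ~CAP_RUNTIME;  /* full access: leave runtime mode */
        }
        return r;
    }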
  amdgpu_vf_error.c
     41  mutex_lock(&adev->virt.vf_errors.lock);  /* in amdgpu_vf_error_put() */
     42  index = adev->virt.vf_errors.write_count % AMDGPU_VF_ERROR_ENTRY_SIZE;  /* in amdgpu_vf_error_put() */
     43  adev->virt.vf_errors.code [index] = error_code;  /* in amdgpu_vf_error_put() */
     44  adev->virt.vf_errors.flags [index] = error_flags;  /* in amdgpu_vf_error_put() */
     45  adev->virt.vf_errors.data [index] = error_data;  /* in amdgpu_vf_error_put() */
     46  adev->virt.vf_errors.write_count ++;  /* in amdgpu_vf_error_put() */
     47  mutex_unlock(&adev->virt.vf_errors.lock);  /* in amdgpu_vf_error_put() */
     58  (!adev->virt.ops) || (!adev->virt.ops->trans_msg)) {  /* in amdgpu_vf_error_trans_all() */
     69  mutex_lock(&adev->virt ...  /* in amdgpu_vf_error_trans_all(), truncated */
    [all...]
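amdgpu_vf_error_put() above is a mutex-protected ring log: the write counter reduced modulo the ring size picks the slot, so the newest entries overwrite the oldest once the ring wraps. A self-contained sketch of the same idiom, with generic names and a pthread mutex standing in for the kernel mutex:

    /* Ring-log sketch mirroring amdgpu_vf_error_put(): write_count % SIZE
     * selects the slot; old entries are overwritten after the ring wraps.
     * Userspace rendition; the kernel version uses mutex_lock(). */
    #include <pthread.h>
    #include <stdint.h>

    #define VF_ERROR_ENTRIES 8

    struct vf_error_log {
        pthread_mutex_t lock;
        uint16_t code[VF_ERROR_ENTRIES];
        uint16_t flags[VF_ERROR_ENTRIES];
        uint64_t data[VF_ERROR_ENTRIES];
        uint32_t write_count;
    };

    static void vf_error_put(struct vf_error_log *log, uint16_t code,
                             uint16_t flags, uint64_t data)
    {
        pthread_mutex_lock(&log->lock);
        uint32_t index = log->write_count % VF_ERROR_ENTRIES;
        log->code[index] = code;
        log->flags[index] = flags;
        log->data[index] = data;
        log->write_count++;
        pthread_mutex_unlock(&log->lock);
    }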
  mxgpu_nv.c
    178  adev->virt.req_init_data_ver = 0;  /* in xgpu_nv_send_access_requests() */
    182  adev->virt.req_init_data_ver =  /* in xgpu_nv_send_access_requests() */
    186  if (adev->virt.req_init_data_ver < 1)  /* in xgpu_nv_send_access_requests() */
    187  adev->virt.req_init_data_ver = 1;  /* in xgpu_nv_send_access_requests() */
    193  adev->virt.fw_reserve.checksum_key =  /* in xgpu_nv_send_access_requests() */
    259  struct amdgpu_virt *virt = container_of(work, struct amdgpu_virt, flr_work);  /* in xgpu_nv_mailbox_flr_work(), local */
    260  struct amdgpu_device *adev = container_of(virt, struct amdgpu_device, virt);  /* in xgpu_nv_mailbox_flr_work() */
    320  schedule_work(&adev->virt.flr_work);  /* in xgpu_nv_mailbox_rcv_irq() */
    348  adev->virt ...  /* in xgpu_nv_mailbox_set_irq_funcs(), truncated */
    [all...]
  mxgpu_ai.c
    179  adev->virt.fw_reserve.checksum_key =  /* in xgpu_ai_send_access_requests() */
    238  struct amdgpu_virt *virt = container_of(work, struct amdgpu_virt, flr_work);  /* in xgpu_ai_mailbox_flr_work(), local */
    239  struct amdgpu_device *adev = container_of(virt, struct amdgpu_device, virt);  /* in xgpu_ai_mailbox_flr_work() */
    293  schedule_work(&adev->virt.flr_work);  /* in xgpu_ai_mailbox_rcv_irq() */
    324  adev->virt.ack_irq.num_types = 1;  /* in xgpu_ai_mailbox_set_irq_funcs() */
    325  adev->virt.ack_irq.funcs = &xgpu_ai_mailbox_ack_irq_funcs;  /* in xgpu_ai_mailbox_set_irq_funcs() */
    326  adev->virt.rcv_irq.num_types = 1;  /* in xgpu_ai_mailbox_set_irq_funcs() */
    327  adev->virt.rcv_irq.funcs = &xgpu_ai_mailbox_rcv_irq_funcs;  /* in xgpu_ai_mailbox_set_irq_funcs() */
    334  r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_BIF, 135, &adev->virt ...  /* in xgpu_ai_mailbox_add_irq_id(), truncated */
    [all...]
/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdgpu/
  amdgpu_vf_error.c
     41  mutex_lock(&adev->virt.vf_errors.lock);  /* in amdgpu_vf_error_put() */
     42  index = adev->virt.vf_errors.write_count % AMDGPU_VF_ERROR_ENTRY_SIZE;  /* in amdgpu_vf_error_put() */
     43  adev->virt.vf_errors.code [index] = error_code;  /* in amdgpu_vf_error_put() */
     44  adev->virt.vf_errors.flags [index] = error_flags;  /* in amdgpu_vf_error_put() */
     45  adev->virt.vf_errors.data [index] = error_data;  /* in amdgpu_vf_error_put() */
     46  adev->virt.vf_errors.write_count ++;  /* in amdgpu_vf_error_put() */
     47  mutex_unlock(&adev->virt.vf_errors.lock);  /* in amdgpu_vf_error_put() */
     58  (!adev->virt.ops) || (!adev->virt.ops->trans_msg)) {  /* in amdgpu_vf_error_trans_all() */
     69  mutex_lock(&adev->virt ...  /* in amdgpu_vf_error_trans_all(), truncated */
    [all...]
  amdgpu_virt.c
    135  struct amdgpu_virt *virt = &adev->virt;  /* in amdgpu_virt_request_full_gpu(), local */
    138  if (virt->ops && virt->ops->req_full_gpu) {  /* in amdgpu_virt_request_full_gpu() */
    139  r = virt->ops->req_full_gpu(adev, init);  /* in amdgpu_virt_request_full_gpu() */
    143  adev->virt.caps &= ~AMDGPU_SRIOV_CAPS_RUNTIME;  /* in amdgpu_virt_request_full_gpu() */
    158  struct amdgpu_virt *virt = &adev->virt;  /* in amdgpu_virt_release_full_gpu(), local */
    161  if (virt->ops && virt ...  /* in amdgpu_virt_release_full_gpu(), truncated */
    179  struct amdgpu_virt *virt = &adev->virt;  /* in amdgpu_virt_reset_gpu(), local */
    195  struct amdgpu_virt *virt = &adev->virt;  /* in amdgpu_virt_request_init_data(), local */
    214  struct amdgpu_virt *virt = &adev->virt;  /* in amdgpu_virt_wait_reset(), local */
    292  struct amdgpu_virt *virt = &adev->virt;  /* in amdgpu_virt_init_ras_err_handler_data(), local */
    332  struct amdgpu_virt *virt = &adev->virt;  /* in amdgpu_virt_ras_release_bp(), local */
    350  struct amdgpu_virt *virt = &adev->virt;  /* in amdgpu_virt_release_ras_err_handler_data(), local */
    369  struct amdgpu_virt *virt = &adev->virt;  /* in amdgpu_virt_ras_add_bps(), local */
    381  struct amdgpu_virt *virt = &adev->virt;  /* in amdgpu_virt_ras_reserve_bps(), local */
    412  struct amdgpu_virt *virt = &adev->virt;  /* in amdgpu_virt_ras_check_bad_page(), local */
    [all...]
  mxgpu_nv.c
    187  adev->virt.req_init_data_ver = 0;  /* in xgpu_nv_send_access_requests() */
    190  adev->virt.req_init_data_ver =  /* in xgpu_nv_send_access_requests() */
    194  if (adev->virt.req_init_data_ver < 1)  /* in xgpu_nv_send_access_requests() */
    195  adev->virt.req_init_data_ver = 1;  /* in xgpu_nv_send_access_requests() */
    201  adev->virt.fw_reserve.checksum_key =  /* in xgpu_nv_send_access_requests() */
    276  struct amdgpu_virt *virt = container_of(work, struct amdgpu_virt, flr_work);  /* in xgpu_nv_mailbox_flr_work(), local */
    277  struct amdgpu_device *adev = container_of(virt, struct amdgpu_device, virt);  /* in xgpu_nv_mailbox_flr_work() */
    350  &adev->virt.flr_work),  /* in xgpu_nv_mailbox_rcv_irq(), continuation */
    380  adev->virt ...  /* in xgpu_nv_mailbox_set_irq_funcs(), truncated */
    [all...]
  mxgpu_ai.c
    181  adev->virt.fw_reserve.checksum_key =  /* in xgpu_ai_send_access_requests() */
    189  adev->virt.req_init_data_ver = 0;  /* in xgpu_ai_send_access_requests() */
    254  struct amdgpu_virt *virt = container_of(work, struct amdgpu_virt, flr_work);  /* in xgpu_ai_mailbox_flr_work(), local */
    255  struct amdgpu_device *adev = container_of(virt, struct amdgpu_device, virt);  /* in xgpu_ai_mailbox_flr_work() */
    322  &adev->virt.flr_work),  /* in xgpu_ai_mailbox_rcv_irq(), continuation */
    355  adev->virt.ack_irq.num_types = 1;  /* in xgpu_ai_mailbox_set_irq_funcs() */
    356  adev->virt.ack_irq.funcs = &xgpu_ai_mailbox_ack_irq_funcs;  /* in xgpu_ai_mailbox_set_irq_funcs() */
    357  adev->virt.rcv_irq.num_types = 1;  /* in xgpu_ai_mailbox_set_irq_funcs() */
    358  adev->virt ...  /* in xgpu_ai_mailbox_set_irq_funcs(), truncated */
    [all...]
/kernel/linux/linux-6.6/drivers/staging/media/atomisp/pci/hmm/
  hmm.c
    230  void hmm_free(ia_css_ptr virt)  /* in hmm_free(), argument */
    234  dev_dbg(atomisp_dev, "%s: free 0x%08x\n", __func__, virt);  /* in hmm_free() */
    236  if (WARN_ON(virt == mmgr_EXCEPTION))  /* in hmm_free() */
    239  bo = hmm_bo_device_search_start(&bo_device, (unsigned int)virt);  /* in hmm_free() */
    244  (unsigned int)virt);  /* in hmm_free(), continuation */
    278  static int load_and_flush_by_kmap(ia_css_ptr virt, void *data,  /* in load_and_flush_by_kmap(), argument */
    286  bo = hmm_bo_device_search_in_range(&bo_device, virt);  /* in load_and_flush_by_kmap() */
    287  ret = hmm_check_bo(bo, virt);  /* in load_and_flush_by_kmap() */
    293  idx = (virt - bo->start) >> PAGE_SHIFT;  /* in load_and_flush_by_kmap() */
    294  offset = (virt ...  /* in load_and_flush_by_kmap(), truncated */
    322  load_and_flush(ia_css_ptr virt, void *data, unsigned int bytes)  /* argument */
    357  hmm_load(ia_css_ptr virt, void *data, unsigned int bytes)  /* argument */
    373  hmm_flush(ia_css_ptr virt, unsigned int bytes)  /* argument */
    379  hmm_store(ia_css_ptr virt, const void *data, unsigned int bytes)  /* argument */
    462  hmm_set(ia_css_ptr virt, int c, unsigned int bytes)  /* argument */
    522  hmm_virt_to_phys(ia_css_ptr virt)  /* argument */
    541  hmm_mmap(struct vm_area_struct *vma, ia_css_ptr virt)  /* argument */
    557  hmm_vmap(ia_css_ptr virt, bool cached)  /* argument */
    578  hmm_flush_vmap(ia_css_ptr virt)  /* argument */
    593  hmm_vunmap(ia_css_ptr virt)  /* argument */
    [all...]
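load_and_flush_by_kmap() above converts an ISP virtual address into a page index and an in-page offset relative to the buffer object's start. The arithmetic is the standard decomposition, sketched below with explicit 4 KiB constants (the kernel uses PAGE_SHIFT and PAGE_MASK):

    /* Address decomposition as in load_and_flush_by_kmap(): split
     * (virt - bo_start) into a page index and a byte offset within
     * that page.  Constants shown explicitly for a 4 KiB page. */
    #include <stdint.h>

    #define PAGE_SHIFT_ 12
    #define PAGE_SIZE_  (1u << PAGE_SHIFT_)
    #define PAGE_MASK_  (~(PAGE_SIZE_ - 1))

    static void split_addr(uint32_t virt, uint32_t bo_start,
                           uint32_t *idx, uint32_t *offset)
    {
        *idx    = (virt - bo_start) >> PAGE_SHIFT_;  /* which page of the bo */
        *offset = (virt - bo_start) & ~PAGE_MASK_;   /* byte within that page */
    }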
/kernel/linux/linux-5.10/drivers/staging/media/atomisp/pci/hmm/
  hmm.c
    285  void hmm_free(ia_css_ptr virt)  /* in hmm_free(), argument */
    289  dev_dbg(atomisp_dev, "%s: free 0x%08x\n", __func__, virt);  /* in hmm_free() */
    291  if (WARN_ON(virt == mmgr_EXCEPTION))  /* in hmm_free() */
    294  bo = hmm_bo_device_search_start(&bo_device, (unsigned int)virt);  /* in hmm_free() */
    299  (unsigned int)virt);  /* in hmm_free(), continuation */
    335  static int load_and_flush_by_kmap(ia_css_ptr virt, void *data,  /* in load_and_flush_by_kmap(), argument */
    343  bo = hmm_bo_device_search_in_range(&bo_device, virt);  /* in load_and_flush_by_kmap() */
    344  ret = hmm_check_bo(bo, virt);  /* in load_and_flush_by_kmap() */
    350  idx = (virt - bo->start) >> PAGE_SHIFT;  /* in load_and_flush_by_kmap() */
    351  offset = (virt ...  /* in load_and_flush_by_kmap(), truncated */
    379  load_and_flush(ia_css_ptr virt, void *data, unsigned int bytes)  /* argument */
    414  hmm_load(ia_css_ptr virt, void *data, unsigned int bytes)  /* argument */
    430  hmm_flush(ia_css_ptr virt, unsigned int bytes)  /* argument */
    436  hmm_store(ia_css_ptr virt, const void *data, unsigned int bytes)  /* argument */
    529  hmm_set(ia_css_ptr virt, int c, unsigned int bytes)  /* argument */
    589  hmm_virt_to_phys(ia_css_ptr virt)  /* argument */
    608  hmm_mmap(struct vm_area_struct *vma, ia_css_ptr virt)  /* argument */
    624  hmm_vmap(ia_css_ptr virt, bool cached)  /* argument */
    645  hmm_flush_vmap(ia_css_ptr virt)  /* argument */
    660  hmm_vunmap(ia_css_ptr virt)  /* argument */
    [all...]
/kernel/linux/linux-5.10/arch/m68k/mm/
  sun3kmap.c
     29  static inline void do_page_mapin(unsigned long phys, unsigned long virt,  /* in do_page_mapin(), argument */
     39  sun3_put_pte(virt, pte);  /* in do_page_mapin() */
     43  print_pte_vaddr(virt);  /* in do_page_mapin() */
     48  static inline void do_pmeg_mapin(unsigned long phys, unsigned long virt,  /* in do_pmeg_mapin(), argument */
     52  if(sun3_get_segmap(virt & ~SUN3_PMEG_MASK) == SUN3_INVALID_PMEG)  /* in do_pmeg_mapin() */
     53  mmu_emu_map_pmeg(sun3_get_context(), virt);  /* in do_pmeg_mapin(), local */
     56  do_page_mapin(phys, virt, type);  /* in do_pmeg_mapin() */
     58  virt += PAGE_SIZE;  /* in do_pmeg_mapin() */
     67  unsigned long offset, virt, ret;  /* in sun3_ioremap(), local */
     83  pr_info("ioremap: got virt ...  /* in sun3_ioremap(), truncated */
    [all...]
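do_pmeg_mapin() above walks a range one page at a time, installing a PTE per page and advancing the physical and virtual addresses in lockstep. A runnable sketch of that loop shape; map_page() is a printf stub standing in for sun3_put_pte(), and all names are illustrative:

    /* Page-walk sketch of do_pmeg_mapin(): one mapping per page, both
     * addresses advance by the page size each iteration. */
    #include <stdio.h>

    #define PAGE_SIZE_ 4096UL

    static void map_page(unsigned long phys, unsigned long virt)
    {
        printf("pte: virt 0x%08lx -> phys 0x%08lx\n", virt, phys);
    }

    static void map_range(unsigned long phys, unsigned long virt,
                          unsigned long num_pages)
    {
        while (num_pages--) {
            map_page(phys, virt);
            phys += PAGE_SIZE_;
            virt += PAGE_SIZE_;
        }
    }

    int main(void)
    {
        map_range(0x00100000UL, 0xfe000000UL, 4);
        return 0;
    }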
/kernel/linux/linux-6.6/arch/m68k/mm/
  sun3kmap.c
     29  static inline void do_page_mapin(unsigned long phys, unsigned long virt,  /* in do_page_mapin(), argument */
     39  sun3_put_pte(virt, pte);  /* in do_page_mapin() */
     43  print_pte_vaddr(virt);  /* in do_page_mapin() */
     48  static inline void do_pmeg_mapin(unsigned long phys, unsigned long virt,  /* in do_pmeg_mapin(), argument */
     52  if(sun3_get_segmap(virt & ~SUN3_PMEG_MASK) == SUN3_INVALID_PMEG)  /* in do_pmeg_mapin() */
     53  mmu_emu_map_pmeg(sun3_get_context(), virt);  /* in do_pmeg_mapin(), local */
     56  do_page_mapin(phys, virt, type);  /* in do_pmeg_mapin() */
     58  virt += PAGE_SIZE;  /* in do_pmeg_mapin() */
     67  unsigned long offset, virt, ret;  /* in sun3_ioremap(), local */
     83  pr_info("ioremap: got virt ...  /* in sun3_ioremap(), truncated */
    [all...]
/kernel/linux/linux-5.10/include/asm-generic/
  sections.h
     85  * with virt == phys kernel mapping, for code that wants to check if an address
    101  * @virt: virtual address of the memory object
    104  * Returns: true if the object specified by @virt and @size is entirely
    108  static inline bool memory_contains(void *begin, void *end, void *virt,  /* in memory_contains(), argument */
    111  return virt >= begin && virt + size <= end;  /* in memory_contains() */
    119  * @virt: virtual address of the memory object
    122  * Returns: true if an object's memory region, specified by @virt and @size,
    125  static inline bool memory_intersects(void *begin, void *end, void *virt,  /* in memory_intersects(), argument */
    128  void *vend = virt ...  /* in memory_intersects(), truncated */
    145  init_section_contains(void *virt, size_t size)  /* argument */
    159  init_section_intersects(void *virt, size_t size)  /* argument */
    [all...]
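The two helpers matched above are pure range predicates: memory_contains() checks that [virt, virt + size) lies entirely inside [begin, end), and memory_intersects() that the two ranges overlap at all. A standalone rendition with a small self-test; the intersects body is truncated in the match above, so the overlap test below is one reasonable formulation, not necessarily the kernel's exact expression:

    /* Standalone rendition of the sections.h predicates shown above:
     * "contains" means entirely inside, "intersects" means any overlap. */
    #include <assert.h>
    #include <stdbool.h>
    #include <stddef.h>

    static bool memory_contains(void *begin, void *end, void *virt, size_t size)
    {
        char *b = begin, *e = end, *v = virt;

        return v >= b && v + size <= e;
    }

    static bool memory_intersects(void *begin, void *end, void *virt, size_t size)
    {
        char *b = begin, *e = end, *v = virt, *vend = v + size;

        return v < e && vend > b;  /* any overlap of the two ranges */
    }

    int main(void)
    {
        static char section[64];

        assert(memory_contains(section, section + 64, section + 8, 16));
        assert(!memory_contains(section, section + 64, section + 60, 16));
        assert(memory_intersects(section, section + 64, section + 60, 16));
        return 0;
    }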
/kernel/linux/linux-5.10/drivers/scsi/lpfc/
  lpfc_mem.c
    107  pool->elements[i].virt = dma_pool_alloc(phba->lpfc_mbuf_pool,  /* in lpfc_mem_alloc() */
    109  if (!pool->elements[i].virt)  /* in lpfc_mem_alloc() */
    180  dma_pool_free(phba->lpfc_mbuf_pool, pool->elements[i].virt,  /* in lpfc_mem_alloc() */
    252  dma_pool_free(phba->lpfc_mbuf_pool, pool->elements[i].virt,  /* in lpfc_mem_free() */
    297  lpfc_mbuf_free(phba, mp->virt, mp->phys);  /* in lpfc_mem_free_all() */
    307  lpfc_mbuf_free(phba, mp->virt, mp->phys);  /* in lpfc_mem_free_all() */
    321  lpfc_mbuf_free(phba, mp->virt, mp->phys);  /* in lpfc_mem_free_all() */
    375  ret = pool->elements[pool->current_count].virt;  /* in lpfc_mbuf_alloc() */
    385  * @virt: mbuf to free
    397  __lpfc_mbuf_free(struct lpfc_hba * phba, void *virt, dma_addr_ ...  /* argument, truncated */
    425  lpfc_mbuf_free(struct lpfc_hba * phba, void *virt, dma_addr_t dma)  /* argument */
    468  lpfc_nvmet_buf_free(struct lpfc_hba *phba, void *virt, dma_addr_t dma)  /* argument */
    [all...]
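lpfc_mem_alloc() above fills a safety pool from a dma_pool at attach time so that mbufs can still be handed out later if an atomic allocation fails. A kernel-context sketch of that preallocation loop, with unwinding on failure; the field and constant names are simplified stand-ins, not the real lpfc structures:

    /* Safety-pool sketch: preallocate N elements from a dma_pool and
     * free them in reverse if any allocation fails part-way through. */
    #include <linux/dmapool.h>
    #include <linux/errno.h>
    #include <linux/types.h>

    #define POOL_SAFETY_COUNT 8

    struct pool_elem {
        void *virt;
        dma_addr_t phys;
    };

    static int fill_safety_pool(struct dma_pool *dp, struct pool_elem *elems)
    {
        int i;

        for (i = 0; i < POOL_SAFETY_COUNT; i++) {
            elems[i].virt = dma_pool_alloc(dp, GFP_KERNEL, &elems[i].phys);
            if (!elems[i].virt)
                goto undo;
        }
        return 0;

    undo:
        while (--i >= 0)
            dma_pool_free(dp, elems[i].virt, elems[i].phys);
        return -ENOMEM;
    }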
/kernel/linux/linux-6.6/drivers/staging/media/atomisp/include/hmm/
  hmm.h
     43  int hmm_load(ia_css_ptr virt, void *data, unsigned int bytes);
     44  int hmm_store(ia_css_ptr virt, const void *data, unsigned int bytes);
     45  int hmm_set(ia_css_ptr virt, int c, unsigned int bytes);
     46  int hmm_flush(ia_css_ptr virt, unsigned int bytes);
     51  phys_addr_t hmm_virt_to_phys(ia_css_ptr virt);
     54  * map ISP memory starts with virt to kernel virtual address
     57  * virt must be the start address of ISP memory (return by hmm_alloc),
     60  void *hmm_vmap(ia_css_ptr virt, bool cached);
     61  void hmm_vunmap(ia_css_ptr virt);
     67  void hmm_flush_vmap(ia_css_ptr virt);
    [all...]
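The header above is essentially the whole ISP-memory API surface. A hedged driver-context usage sketch, assuming isp_ptr is an ia_css_ptr previously obtained from the driver's allocator (the alloc call is not among these matches, so it is left out), and assuming the int-returning calls use 0 for success:

    /* Round-trip sketch using only the prototypes listed in hmm.h. */
    #include "hmm/hmm.h"  /* the header shown above */

    static int demo_hmm_roundtrip(ia_css_ptr isp_ptr)
    {
        char in[64] = "pattern", out[64];
        void *kva;

        if (hmm_store(isp_ptr, in, sizeof(in)))   /* CPU -> ISP memory */
            return -1;
        if (hmm_load(isp_ptr, out, sizeof(out)))  /* ISP memory -> CPU */
            return -1;

        kva = hmm_vmap(isp_ptr, false);           /* uncached kernel mapping */
        if (kva) {
            ((char *)kva)[0] = 'x';
            hmm_flush_vmap(isp_ptr);              /* push the CPU write out */
            hmm_vunmap(isp_ptr);
        }
        return 0;
    }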
/kernel/linux/linux-5.10/drivers/gpio/
  gpio-stp-xway.c
     81  void __iomem *virt;  /* member */
    104  return (xway_stp_r32(chip->virt, XWAY_STP_CPU0) & BIT(gpio));  /* in xway_stp_get() */
    123  xway_stp_w32(chip->virt, chip->shadow, XWAY_STP_CPU0);  /* in xway_stp_set() */
    125  xway_stp_w32_mask(chip->virt, 0, XWAY_STP_CON_SWU, XWAY_STP_CON0);  /* in xway_stp_set() */
    169  xway_stp_w32(chip->virt, 0, XWAY_STP_AR);  /* in xway_stp_hw_init() */
    170  xway_stp_w32(chip->virt, 0, XWAY_STP_CPU0);  /* in xway_stp_hw_init() */
    171  xway_stp_w32(chip->virt, 0, XWAY_STP_CPU1);  /* in xway_stp_hw_init() */
    172  xway_stp_w32(chip->virt, XWAY_STP_CON_SWU, XWAY_STP_CON0);  /* in xway_stp_hw_init() */
    173  xway_stp_w32(chip->virt, 0, XWAY_STP_CON1);  /* in xway_stp_hw_init() */
    176  xway_stp_w32_mask(chip->virt, XWAY_STP_EDGE_MAS ...  /* in xway_stp_hw_init(), truncated */
    [all...]
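xway_stp_set() above never does a read-modify-write on the hardware: it keeps a shadow copy of the output register, flips the bit there, writes the whole shadow word out, and then requests a soft update. A generic sketch of that shadow-register idiom; the register offsets and the reg_write() stub are illustrative, not the xway definitions:

    /* Shadow-register sketch of xway_stp_set(): mutate the cached copy,
     * write it out whole, then trigger the soft update. */
    #include <stdint.h>

    #define REG_CPU0 0x00
    #define REG_CON0 0x10
    #define CON_SWU  (1u << 31)

    static uint32_t shadow;  /* cached copy of REG_CPU0 */

    static void reg_write(uint32_t off, uint32_t val)
    {
        /* stand-in for the MMIO write (xway_stp_w32) */
        (void)off; (void)val;
    }

    static void stp_set(unsigned int gpio, int value)
    {
        if (value)
            shadow |= 1u << gpio;
        else
            shadow &= ~(1u << gpio);

        reg_write(REG_CPU0, shadow);   /* write the whole shadow word */
        reg_write(REG_CON0, CON_SWU);  /* request a soft update */
    }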
/kernel/linux/linux-6.6/drivers/gpio/
  gpio-stp-xway.c
     82  void __iomem *virt;  /* member */
    105  return (xway_stp_r32(chip->virt, XWAY_STP_CPU0) & BIT(gpio));  /* in xway_stp_get() */
    124  xway_stp_w32(chip->virt, chip->shadow, XWAY_STP_CPU0);  /* in xway_stp_set() */
    126  xway_stp_w32_mask(chip->virt, 0, XWAY_STP_CON_SWU, XWAY_STP_CON0);  /* in xway_stp_set() */
    170  xway_stp_w32(chip->virt, 0, XWAY_STP_AR);  /* in xway_stp_hw_init() */
    171  xway_stp_w32(chip->virt, 0, XWAY_STP_CPU0);  /* in xway_stp_hw_init() */
    172  xway_stp_w32(chip->virt, 0, XWAY_STP_CPU1);  /* in xway_stp_hw_init() */
    173  xway_stp_w32(chip->virt, XWAY_STP_CON_SWU, XWAY_STP_CON0);  /* in xway_stp_hw_init() */
    174  xway_stp_w32(chip->virt, 0, XWAY_STP_CON1);  /* in xway_stp_hw_init() */
    177  xway_stp_w32_mask(chip->virt, XWAY_STP_EDGE_MAS ...  /* in xway_stp_hw_init(), truncated */
    [all...]
/kernel/linux/linux-5.10/drivers/staging/media/atomisp/include/hmm/
  hmm.h
     45  int hmm_load(ia_css_ptr virt, void *data, unsigned int bytes);
     46  int hmm_store(ia_css_ptr virt, const void *data, unsigned int bytes);
     47  int hmm_set(ia_css_ptr virt, int c, unsigned int bytes);
     48  int hmm_flush(ia_css_ptr virt, unsigned int bytes);
     53  phys_addr_t hmm_virt_to_phys(ia_css_ptr virt);
     56  * map ISP memory starts with virt to kernel virtual address
     59  * virt must be the start address of ISP memory (return by hmm_alloc),
     62  void *hmm_vmap(ia_css_ptr virt, bool cached);
     63  void hmm_vunmap(ia_css_ptr virt);
     69  void hmm_flush_vmap(ia_css_ptr virt);
    [all...]
/kernel/linux/linux-6.6/include/asm-generic/
  sections.h
     84  * @virt: virtual address of the memory object
     87  * Returns: true if the object specified by @virt and @size is entirely
     91  static inline bool memory_contains(void *begin, void *end, void *virt,  /* in memory_contains(), argument */
     94  return virt >= begin && virt + size <= end;  /* in memory_contains() */
    102  * @virt: virtual address of the memory object
    105  * Returns: true if an object's memory region, specified by @virt and @size,
    108  static inline bool memory_intersects(void *begin, void *end, void *virt,  /* in memory_intersects(), argument */
    111  void *vend = virt + size;  /* in memory_intersects() */
    113  if (virt < en ...  /* in memory_intersects(), truncated */
    128  init_section_contains(void *virt, size_t size)  /* argument */
    142  init_section_intersects(void *virt, size_t size)  /* argument */
    [all...]
/kernel/linux/linux-5.10/drivers/media/platform/s5p-mfc/
  s5p_mfc_opr.c
     54  b->virt = dev->mem_virt + offset;  /* in s5p_mfc_alloc_priv_buf() */
     61  b->virt = dma_alloc_coherent(mem_dev, b->size, &b->dma, GFP_KERNEL);  /* in s5p_mfc_alloc_priv_buf() */
     62  if (!b->virt)  /* in s5p_mfc_alloc_priv_buf() */
     67  dma_free_coherent(mem_dev, b->size, b->virt, b->dma);  /* in s5p_mfc_alloc_priv_buf() */
     72  mfc_debug(3, "Allocated addr %p %pad\n", b->virt, &b->dma);  /* in s5p_mfc_alloc_priv_buf() */
     87  b->virt = dma_alloc_coherent(mem_dev, b->size, &b->dma, GFP_KERNEL);  /* in s5p_mfc_alloc_generic_buf() */
     88  if (!b->virt)  /* in s5p_mfc_alloc_generic_buf() */
     91  mfc_debug(3, "Allocated addr %p %pad\n", b->virt, &b->dma);  /* in s5p_mfc_alloc_generic_buf() */
    109  dma_free_coherent(mem_dev, b->size, b->virt, b->dma);  /* in s5p_mfc_release_priv_buf() */
    111  b->virt ...  /* in s5p_mfc_release_priv_buf(), truncated */
    [all...]
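The s5p-mfc helpers above bundle the three things a coherent DMA buffer needs (kernel virtual address, bus address, size) in one small struct, so alloc and free stay symmetric. A kernel-context sketch of that pairing, with a simplified struct rather than the real s5p-mfc one:

    /* Keep virt, dma and size together so dma_free_coherent() is always
     * called with exactly what dma_alloc_coherent() returned. */
    #include <linux/dma-mapping.h>
    #include <linux/errno.h>

    struct priv_buf {
        void *virt;
        dma_addr_t dma;
        size_t size;
    };

    static int priv_buf_alloc(struct device *dev, struct priv_buf *b, size_t size)
    {
        b->size = size;
        b->virt = dma_alloc_coherent(dev, b->size, &b->dma, GFP_KERNEL);
        return b->virt ? 0 : -ENOMEM;
    }

    static void priv_buf_free(struct device *dev, struct priv_buf *b)
    {
        if (!b->virt)
            return;
        dma_free_coherent(dev, b->size, b->virt, b->dma);
        b->virt = NULL;
        b->dma = 0;
    }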
/kernel/linux/linux-6.6/drivers/media/platform/samsung/s5p-mfc/
  s5p_mfc_opr.c
     54  b->virt = dev->mem_virt + offset;  /* in s5p_mfc_alloc_priv_buf() */
     61  b->virt = dma_alloc_coherent(mem_dev, b->size, &b->dma, GFP_KERNEL);  /* in s5p_mfc_alloc_priv_buf() */
     62  if (!b->virt)  /* in s5p_mfc_alloc_priv_buf() */
     67  dma_free_coherent(mem_dev, b->size, b->virt, b->dma);  /* in s5p_mfc_alloc_priv_buf() */
     72  mfc_debug(3, "Allocated addr %p %pad\n", b->virt, &b->dma);  /* in s5p_mfc_alloc_priv_buf() */
     87  b->virt = dma_alloc_coherent(mem_dev, b->size, &b->dma, GFP_KERNEL);  /* in s5p_mfc_alloc_generic_buf() */
     88  if (!b->virt)  /* in s5p_mfc_alloc_generic_buf() */
     91  mfc_debug(3, "Allocated addr %p %pad\n", b->virt, &b->dma);  /* in s5p_mfc_alloc_generic_buf() */
    109  dma_free_coherent(mem_dev, b->size, b->virt, b->dma);  /* in s5p_mfc_release_priv_buf() */
    111  b->virt ...  /* in s5p_mfc_release_priv_buf(), truncated */
    [all...]
/kernel/linux/linux-5.10/drivers/net/ethernet/qlogic/qed/
  qed_chain.c
     73  void *virt, *virt_next;  /* in qed_chain_free_next_ptr(), local */
     77  virt = chain->p_virt_addr;  /* in qed_chain_free_next_ptr() */
     81  if (!virt)  /* in qed_chain_free_next_ptr() */
     84  next = virt + size;  /* in qed_chain_free_next_ptr() */
     88  dma_free_coherent(dev, chain->page_size, virt, phys);  /* in qed_chain_free_next_ptr() */
     90  virt = virt_next;  /* in qed_chain_free_next_ptr() */
    202  void *virt, *virt_prev = NULL;  /* in qed_chain_alloc_next_ptr(), local */
    207  virt = dma_alloc_coherent(dev, chain->page_size, &phys,  /* in qed_chain_alloc_next_ptr() */
    209  if (!virt)  /* in qed_chain_alloc_next_ptr() */
    213  qed_chain_init_mem(chain, virt, phy ...  /* in qed_chain_alloc_next_ptr(), truncated */
    236  void *virt;  /* in qed_chain_alloc_single(), local */
    257  void *virt;  /* in qed_chain_alloc_pbl(), local */
    [all...]
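qed_chain_free_next_ptr() above frees a linked chain of DMA pages in which each page's tail holds the virtual and physical addresses of the next page, so both links must be read out before the current page is freed. A kernel-context sketch of that walk; the next-pointer struct is a simplified stand-in for the qed layout:

    /* Next-ptr chain walk: save the links, then free the current page. */
    #include <linux/dma-mapping.h>
    #include <linux/types.h>

    struct next_ptr {  /* lives at the tail of each page */
        void *next_virt;
        dma_addr_t next_phys;
    };

    static void chain_free(struct device *dev, void *virt, dma_addr_t phys,
                           u32 page_size, u32 page_cnt)
    {
        while (page_cnt-- && virt) {
            struct next_ptr *np = virt + page_size - sizeof(*np);
            void *virt_next = np->next_virt;
            dma_addr_t phys_next = np->next_phys;

            dma_free_coherent(dev, page_size, virt, phys);

            virt = virt_next;
            phys = phys_next;
        }
    }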
/kernel/linux/linux-6.6/drivers/net/ethernet/qlogic/qed/
  qed_chain.c
     73  void *virt, *virt_next;  /* in qed_chain_free_next_ptr(), local */
     77  virt = chain->p_virt_addr;  /* in qed_chain_free_next_ptr() */
     81  if (!virt)  /* in qed_chain_free_next_ptr() */
     84  next = virt + size;  /* in qed_chain_free_next_ptr() */
     88  dma_free_coherent(dev, chain->page_size, virt, phys);  /* in qed_chain_free_next_ptr() */
     90  virt = virt_next;  /* in qed_chain_free_next_ptr() */
    202  void *virt, *virt_prev = NULL;  /* in qed_chain_alloc_next_ptr(), local */
    207  virt = dma_alloc_coherent(dev, chain->page_size, &phys,  /* in qed_chain_alloc_next_ptr() */
    209  if (!virt)  /* in qed_chain_alloc_next_ptr() */
    213  qed_chain_init_mem(chain, virt, phy ...  /* in qed_chain_alloc_next_ptr(), truncated */
    236  void *virt;  /* in qed_chain_alloc_single(), local */
    257  void *virt;  /* in qed_chain_alloc_pbl(), local */
    [all...]
/kernel/linux/linux-6.6/drivers/scsi/lpfc/
  lpfc_mem.c
    107  pool->elements[i].virt = dma_pool_alloc(phba->lpfc_mbuf_pool,  /* in lpfc_mem_alloc() */
    109  if (!pool->elements[i].virt)  /* in lpfc_mem_alloc() */
    180  dma_pool_free(phba->lpfc_mbuf_pool, pool->elements[i].virt,  /* in lpfc_mem_alloc() */
    252  dma_pool_free(phba->lpfc_mbuf_pool, pool->elements[i].virt,  /* in lpfc_mem_free() */
    297  lpfc_mbuf_free(phba, mp->virt, mp->phys);  /* in lpfc_mem_free_all() */
    307  lpfc_mbuf_free(phba, mp->virt, mp->phys);  /* in lpfc_mem_free_all() */
    321  lpfc_mbuf_free(phba, mp->virt, mp->phys);  /* in lpfc_mem_free_all() */
    342  phba->cgn_i->virt, phba->cgn_i->phys);  /* in lpfc_mem_free_all(), continuation */
    391  ret = pool->elements[pool->current_count].virt;  /* in lpfc_mbuf_alloc() */
    401  * @virt ...  /* kernel-doc, truncated */
    413  __lpfc_mbuf_free(struct lpfc_hba * phba, void *virt, dma_addr_t dma)  /* argument */
    441  lpfc_mbuf_free(struct lpfc_hba * phba, void *virt, dma_addr_t dma)  /* argument */
    484  lpfc_nvmet_buf_free(struct lpfc_hba *phba, void *virt, dma_addr_t dma)  /* argument */
    [all...]