Searched refs:sdma (Results 1 - 25 of 91) sorted by relevance


/kernel/linux/linux-6.6/drivers/dma/
imx-sdma.c
3 // drivers/dma/imx-sdma.c
314 * struct sdma_context_data - sdma context specific to a channel
401 * @desc: sdma description including vd and other special member
402 * @sdma: pointer to the SDMA engine for this channel
425 * @data: specific sdma interface structure
440 struct sdma_engine *sdma; member
508 * ecspi ERR009165 fixed should be done in sdma script
667 { .compatible = "fsl,imx6q-sdma", .data = &sdma_imx6q, },
668 { .compatible = "fsl,imx53-sdma", .data = &sdma_imx53, },
669 { .compatible = "fsl,imx51-sdma",
685 chnenbl_ofs(struct sdma_engine *sdma, unsigned int event) chnenbl_ofs() argument
694 struct sdma_engine *sdma = sdmac->sdma; sdma_config_ownership() local
727 is_sdma_channel_enabled(struct sdma_engine *sdma, int channel) is_sdma_channel_enabled() argument
732 sdma_enable_channel(struct sdma_engine *sdma, int channel) sdma_enable_channel() argument
740 sdma_run_channel0(struct sdma_engine *sdma) sdma_run_channel0() argument
762 sdma_load_script(struct sdma_engine *sdma, void *buf, int size, u32 address) sdma_load_script() argument
796 struct sdma_engine *sdma = sdmac->sdma; sdma_event_enable() local
816 struct sdma_engine *sdma = sdmac->sdma; sdma_event_disable() local
835 struct sdma_engine *sdma = sdmac->sdma; sdma_start_desc() local
939 struct sdma_engine *sdma = dev_id; sdma_int_handler() local
980 struct sdma_engine *sdma = sdmac->sdma; sdma_get_pc() local
1099 struct sdma_engine *sdma = sdmac->sdma; sdma_load_context() local
1165 struct sdma_engine *sdma = sdmac->sdma; sdma_disable_channel() local
1226 struct sdma_engine *sdma = sdmac->sdma; sdma_set_watermarklevel_for_p2p() local
1348 struct sdma_engine *sdma = sdmac->sdma; sdma_set_channel_priority() local
1361 sdma_request_channel0(struct sdma_engine *sdma) sdma_request_channel0() argument
1483 struct sdma_engine *sdma = sdmac->sdma; sdma_free_chan_resources() local
1552 struct sdma_engine *sdma = sdmac->sdma; sdma_prep_memcpy() local
1609 struct sdma_engine *sdma = sdmac->sdma; sdma_prep_slave_sg() local
1693 struct sdma_engine *sdma = sdmac->sdma; sdma_prep_dma_cyclic() local
1801 struct sdma_engine *sdma = sdmac->sdma; sdma_config() local
1892 sdma_add_scripts(struct sdma_engine *sdma, const struct sdma_script_start_addrs *addr) sdma_add_scripts() argument
1931 struct sdma_engine *sdma = context; sdma_load_firmware() local
1995 sdma_event_remap(struct sdma_engine *sdma) sdma_event_remap() argument
2059 sdma_get_firmware(struct sdma_engine *sdma, const char *fw_name) sdma_get_firmware() argument
2071 sdma_init(struct sdma_engine *sdma) sdma_init() argument
2163 struct sdma_engine *sdma = ofdma->of_dma_data; sdma_xlate() local
2195 struct sdma_engine *sdma; sdma_probe() local
2363 struct sdma_engine *sdma = platform_get_drvdata(pdev); sdma_remove() local
[all...]
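
The hits above are the i.MX SDMA engine driver itself (sdma_config(), sdma_prep_slave_sg(), sdma_prep_dma_cyclic(), ...). Peripheral drivers never call these entry points directly; they reach them through the generic dmaengine client API. A minimal consumer-side sketch, assuming the caller already holds a channel from dma_request_chan(dev, "tx") that the device tree routes to imx-sdma; the FIFO address and burst size are illustrative, not taken from the driver:

    #include <linux/dmaengine.h>
    #include <linux/err.h>

    /* Minimal sketch: push one DMA-mapped buffer to a peripheral FIFO.
     * The caller owns 'chan' and releases it with dma_release_channel().
     */
    static int example_sdma_tx(struct dma_chan *chan, dma_addr_t buf,
                               size_t len)
    {
        struct dma_slave_config cfg = {
            .direction      = DMA_MEM_TO_DEV,
            .dst_addr       = 0x30890040, /* illustrative FIFO address */
            .dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
            .dst_maxburst   = 4,
        };
        struct dma_async_tx_descriptor *desc;
        int ret;

        ret = dmaengine_slave_config(chan, &cfg); /* lands in sdma_config() */
        if (ret)
            return ret;

        desc = dmaengine_prep_slave_single(chan, buf, len, DMA_MEM_TO_DEV,
                                           DMA_PREP_INTERRUPT);
        if (!desc)                      /* lands in sdma_prep_slave_sg() */
            return -ENOMEM;

        dmaengine_submit(desc);
        dma_async_issue_pending(chan);  /* eventually sdma_start_desc() */
        return 0;
    }
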
/kernel/linux/linux-5.10/drivers/dma/
imx-sdma.c
3 // drivers/dma/imx-sdma.c
38 #include <linux/platform_data/dma-imx-sdma.h>
247 * struct sdma_context_data - sdma context specific to a channel
334 * @desc: sdma description including vd and other special member
335 * @sdma: pointer to the SDMA engine for this channel
358 * @data: specific sdma interface structure
365 struct sdma_engine *sdma; member
570 .name = "imx25-sdma",
573 .name = "imx31-sdma",
576 .name = "imx35-sdma",
617 chnenbl_ofs(struct sdma_engine *sdma, unsigned int event) chnenbl_ofs() argument
626 struct sdma_engine *sdma = sdmac->sdma; sdma_config_ownership() local
659 sdma_enable_channel(struct sdma_engine *sdma, int channel) sdma_enable_channel() argument
667 sdma_run_channel0(struct sdma_engine *sdma) sdma_run_channel0() argument
689 sdma_load_script(struct sdma_engine *sdma, void *buf, int size, u32 address) sdma_load_script() argument
724 struct sdma_engine *sdma = sdmac->sdma; sdma_event_enable() local
736 struct sdma_engine *sdma = sdmac->sdma; sdma_event_disable() local
755 struct sdma_engine *sdma = sdmac->sdma; sdma_start_desc() local
848 struct sdma_engine *sdma = dev_id; sdma_int_handler() local
886 struct sdma_engine *sdma = sdmac->sdma; sdma_get_pc() local
979 struct sdma_engine *sdma = sdmac->sdma; sdma_load_context() local
1040 struct sdma_engine *sdma = sdmac->sdma; sdma_disable_channel() local
1100 struct sdma_engine *sdma = sdmac->sdma; sdma_set_watermarklevel_for_p2p() local
1186 struct sdma_engine *sdma = sdmac->sdma; sdma_set_channel_priority() local
1199 sdma_request_channel0(struct sdma_engine *sdma) sdma_request_channel0() argument
1319 struct sdma_engine *sdma = sdmac->sdma; sdma_free_chan_resources() local
1383 struct sdma_engine *sdma = sdmac->sdma; sdma_prep_memcpy() local
1440 struct sdma_engine *sdma = sdmac->sdma; sdma_prep_slave_sg() local
1524 struct sdma_engine *sdma = sdmac->sdma; sdma_prep_dma_cyclic() local
1696 sdma_add_scripts(struct sdma_engine *sdma, const struct sdma_script_start_addrs *addr) sdma_add_scripts() argument
1722 struct sdma_engine *sdma = context; sdma_load_firmware() local
1784 sdma_event_remap(struct sdma_engine *sdma) sdma_event_remap() argument
1848 sdma_get_firmware(struct sdma_engine *sdma, const char *fw_name) sdma_get_firmware() argument
1860 sdma_init(struct sdma_engine *sdma) sdma_init() argument
1952 struct sdma_engine *sdma = ofdma->of_dma_data; sdma_xlate() local
1988 struct sdma_engine *sdma; sdma_probe() local
2180 struct sdma_engine *sdma = platform_get_drvdata(pdev); sdma_remove() local
[all...]
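
Nearly every match in this file is the same idiom: each channel (sdmac) keeps a back-pointer to the engine that owns it, so per-channel callbacks can reach the shared registers. A stripped-down sketch of that layout; field names beyond sdma/channel are illustrative, and the 32-channel count is the i.MX SDMA's fixed channel array:

    /* Sketch of the engine/channel back-pointer idiom seen above. */
    struct sdma_engine;

    struct sdma_channel {
        struct sdma_engine *sdma;        /* back-pointer, set at probe */
        unsigned int channel;            /* index into the engine array */
    };

    struct sdma_engine {
        void __iomem *regs;              /* shared MMIO block */
        struct sdma_channel channel[32]; /* one entry per HW channel */
    };

    static void sdma_example_op(struct sdma_channel *sdmac)
    {
        struct sdma_engine *sdma = sdmac->sdma; /* the pattern in the hits */
        /* ... touch sdma->regs on behalf of sdmac->channel ... */
    }
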
sirf-dma.c
238 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan); in sirfsoc_dma_execute() local
247 base = sdma->base; in sirfsoc_dma_execute()
253 if (sdma->type == SIRFSOC_DMA_VER_A7V2) in sirfsoc_dma_execute()
257 sdma->exec_desc(sdesc, cid, schan->mode, base); in sirfsoc_dma_execute()
266 struct sirfsoc_dma *sdma = data; in sirfsoc_dma_irq() local
274 switch (sdma->type) { in sirfsoc_dma_irq()
277 is = readl(sdma->base + SIRFSOC_DMA_CH_INT); in sirfsoc_dma_irq()
278 reg = sdma->base + SIRFSOC_DMA_CH_INT; in sirfsoc_dma_irq()
282 schan = &sdma->channels[ch]; in sirfsoc_dma_irq()
300 is = readl(sdma in sirfsoc_dma_irq()
338 sirfsoc_dma_process_completed(struct sirfsoc_dma *sdma) sirfsoc_dma_process_completed() argument
398 struct sirfsoc_dma *sdma = from_tasklet(sdma, t, tasklet); sirfsoc_dma_tasklet() local
445 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan); sirfsoc_dma_terminate_all() local
491 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan); sirfsoc_dma_pause_chan() local
525 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan); sirfsoc_dma_resume_chan() local
558 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(chan); sirfsoc_dma_alloc_chan_resources() local
599 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(chan); sirfsoc_dma_free_chan_resources() local
643 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(chan); sirfsoc_dma_tx_status() local
692 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(chan); sirfsoc_dma_prep_interleaved() local
830 struct sirfsoc_dma *sdma = ofdma->of_dma_data; of_dma_sirfsoc_xlate() local
844 struct sirfsoc_dma *sdma; sirfsoc_dma_probe() local
974 struct sirfsoc_dma *sdma = dev_get_drvdata(dev); sirfsoc_dma_remove() local
990 struct sirfsoc_dma *sdma = dev_get_drvdata(dev); sirfsoc_dma_runtime_suspend() local
998 struct sirfsoc_dma *sdma = dev_get_drvdata(dev); sirfsoc_dma_runtime_resume() local
1011 struct sirfsoc_dma *sdma = dev_get_drvdata(dev); sirfsoc_dma_pm_suspend() local
1058 struct sirfsoc_dma *sdma = dev_get_drvdata(dev); sirfsoc_dma_pm_resume() local
[all...]
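
The dma_chan_to_sirfsoc_dma() and from_tasklet() hits are two spellings of the same container_of trick: recover the driver-private structure from a pointer to one of its embedded members. A sketch of the tasklet variant, with an illustrative structure standing in for struct sirfsoc_dma:

    #include <linux/interrupt.h>

    struct sirfsoc_dma_example {
        struct tasklet_struct tasklet;  /* embedded member */
        /* ... driver state ... */
    };

    static void sirfsoc_dma_tasklet_example(struct tasklet_struct *t)
    {
        /* from_tasklet() is container_of() with the types filled in:
         * it maps the tasklet pointer back to the enclosing structure.
         */
        struct sirfsoc_dma_example *sdma = from_tasklet(sdma, t, tasklet);

        (void)sdma; /* ... process completed descriptors on sdma ... */
    }

The pairing is tasklet_setup(&sdma->tasklet, sirfsoc_dma_tasklet_example) at init time, which is what makes the back-conversion in the handler safe.
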
/kernel/linux/linux-5.10/drivers/net/ethernet/marvell/prestera/
prestera_rxtx.c
109 struct prestera_sdma sdma; member
112 static int prestera_sdma_buf_init(struct prestera_sdma *sdma, in prestera_sdma_buf_init() argument
118 desc = dma_pool_alloc(sdma->desc_pool, GFP_DMA | GFP_KERNEL, &dma); in prestera_sdma_buf_init()
130 static u32 prestera_sdma_map(struct prestera_sdma *sdma, dma_addr_t pa) in prestera_sdma_map() argument
132 return sdma->map_addr + pa; in prestera_sdma_map()
135 static void prestera_sdma_rx_desc_init(struct prestera_sdma *sdma, in prestera_sdma_rx_desc_init() argument
144 desc->buff = cpu_to_le32(prestera_sdma_map(sdma, buf)); in prestera_sdma_rx_desc_init()
152 static void prestera_sdma_rx_desc_set_next(struct prestera_sdma *sdma, in prestera_sdma_rx_desc_set_next() argument
156 desc->next = cpu_to_le32(prestera_sdma_map(sdma, next)); in prestera_sdma_rx_desc_set_next()
159 static int prestera_sdma_rx_skb_alloc(struct prestera_sdma *sdma, in prestera_sdma_rx_skb_alloc() argument
189 prestera_sdma_rx_skb_get(struct prestera_sdma *sdma, struct prestera_sdma_buf *buf) prestera_sdma_rx_skb_get() argument
214 prestera_rxtx_process_skb(struct prestera_sdma *sdma, struct sk_buff *skb) prestera_rxtx_process_skb() argument
274 struct prestera_sdma *sdma; prestera_sdma_rx_poll() local
331 prestera_sdma_rx_fini(struct prestera_sdma *sdma) prestera_sdma_rx_fini() argument
365 prestera_sdma_rx_init(struct prestera_sdma *sdma) prestera_sdma_rx_init() argument
426 prestera_sdma_tx_desc_init(struct prestera_sdma *sdma, struct prestera_sdma_desc *desc) prestera_sdma_tx_desc_init() argument
433 prestera_sdma_tx_desc_set_next(struct prestera_sdma *sdma, struct prestera_sdma_desc *desc, dma_addr_t next) prestera_sdma_tx_desc_set_next() argument
440 prestera_sdma_tx_desc_set_buf(struct prestera_sdma *sdma, struct prestera_sdma_desc *desc, dma_addr_t buf, size_t len) prestera_sdma_tx_desc_set_buf() argument
464 prestera_sdma_tx_buf_map(struct prestera_sdma *sdma, struct prestera_sdma_buf *buf, struct sk_buff *skb) prestera_sdma_tx_buf_map() argument
481 prestera_sdma_tx_buf_unmap(struct prestera_sdma *sdma, struct prestera_sdma_buf *buf) prestera_sdma_tx_buf_unmap() argument
493 struct prestera_sdma *sdma; prestera_sdma_tx_recycle_work_fn() local
520 prestera_sdma_tx_init(struct prestera_sdma *sdma) prestera_sdma_tx_init() argument
571 prestera_sdma_tx_fini(struct prestera_sdma *sdma) prestera_sdma_tx_fini() argument
603 struct prestera_sdma *sdma = arg; prestera_rxtx_handle_event() local
614 struct prestera_sdma *sdma = &sw->rxtx->sdma; prestera_sdma_switch_init() local
674 struct prestera_sdma *sdma = &sw->rxtx->sdma; prestera_sdma_switch_fini() local
685 prestera_sdma_is_ready(struct prestera_sdma *sdma) prestera_sdma_is_ready() argument
690 prestera_sdma_tx_wait(struct prestera_sdma *sdma, struct prestera_tx_ring *tx_ring) prestera_sdma_tx_wait() argument
705 prestera_sdma_tx_start(struct prestera_sdma *sdma) prestera_sdma_tx_start() argument
711 prestera_sdma_xmit(struct prestera_sdma *sdma, struct sk_buff *skb) prestera_sdma_xmit() argument
[all...]
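
prestera_sdma_map() in the hits is just a constant window offset (sdma->map_addr + pa) applied before a DMA address is written into a little-endian descriptor. A sketch of the descriptor-init pattern visible above; the descriptor layout is abbreviated, not the real one:

    #include <linux/types.h>

    /* Abbreviated descriptor; the real struct has more fields. */
    struct prestera_sdma_desc_example {
        __le32 buff;   /* device-visible address of the packet buffer */
        __le32 next;   /* device-visible address of the next descriptor */
    };

    /* Device-visible addresses are host PA plus a fixed window offset. */
    static u32 example_sdma_map(u32 map_addr, dma_addr_t pa)
    {
        return map_addr + pa;
    }

    static void example_rx_desc_init(struct prestera_sdma_desc_example *desc,
                                     u32 map_addr, dma_addr_t buf,
                                     dma_addr_t next)
    {
        desc->buff = cpu_to_le32(example_sdma_map(map_addr, buf));
        desc->next = cpu_to_le32(example_sdma_map(map_addr, next));
    }
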
/kernel/linux/linux-6.6/drivers/net/ethernet/marvell/prestera/
prestera_rxtx.c
107 struct prestera_sdma sdma; member
110 static int prestera_sdma_buf_init(struct prestera_sdma *sdma, in prestera_sdma_buf_init() argument
116 desc = dma_pool_alloc(sdma->desc_pool, GFP_DMA | GFP_KERNEL, &dma); in prestera_sdma_buf_init()
128 static u32 prestera_sdma_map(struct prestera_sdma *sdma, dma_addr_t pa) in prestera_sdma_map() argument
130 return sdma->map_addr + pa; in prestera_sdma_map()
133 static void prestera_sdma_rx_desc_init(struct prestera_sdma *sdma, in prestera_sdma_rx_desc_init() argument
142 desc->buff = cpu_to_le32(prestera_sdma_map(sdma, buf)); in prestera_sdma_rx_desc_init()
150 static void prestera_sdma_rx_desc_set_next(struct prestera_sdma *sdma, in prestera_sdma_rx_desc_set_next() argument
154 desc->next = cpu_to_le32(prestera_sdma_map(sdma, next)); in prestera_sdma_rx_desc_set_next()
157 static int prestera_sdma_rx_skb_alloc(struct prestera_sdma *sdma, in prestera_sdma_rx_skb_alloc() argument
187 prestera_sdma_rx_skb_get(struct prestera_sdma *sdma, struct prestera_sdma_buf *buf) prestera_sdma_rx_skb_get() argument
212 prestera_rxtx_process_skb(struct prestera_sdma *sdma, struct sk_buff *skb) prestera_rxtx_process_skb() argument
276 struct prestera_sdma *sdma; prestera_sdma_rx_poll() local
333 prestera_sdma_rx_fini(struct prestera_sdma *sdma) prestera_sdma_rx_fini() argument
367 prestera_sdma_rx_init(struct prestera_sdma *sdma) prestera_sdma_rx_init() argument
428 prestera_sdma_tx_desc_init(struct prestera_sdma *sdma, struct prestera_sdma_desc *desc) prestera_sdma_tx_desc_init() argument
435 prestera_sdma_tx_desc_set_next(struct prestera_sdma *sdma, struct prestera_sdma_desc *desc, dma_addr_t next) prestera_sdma_tx_desc_set_next() argument
442 prestera_sdma_tx_desc_set_buf(struct prestera_sdma *sdma, struct prestera_sdma_desc *desc, dma_addr_t buf, size_t len) prestera_sdma_tx_desc_set_buf() argument
466 prestera_sdma_tx_buf_map(struct prestera_sdma *sdma, struct prestera_sdma_buf *buf, struct sk_buff *skb) prestera_sdma_tx_buf_map() argument
483 prestera_sdma_tx_buf_unmap(struct prestera_sdma *sdma, struct prestera_sdma_buf *buf) prestera_sdma_tx_buf_unmap() argument
495 struct prestera_sdma *sdma; prestera_sdma_tx_recycle_work_fn() local
522 prestera_sdma_tx_init(struct prestera_sdma *sdma) prestera_sdma_tx_init() argument
573 prestera_sdma_tx_fini(struct prestera_sdma *sdma) prestera_sdma_tx_fini() argument
605 struct prestera_sdma *sdma = arg; prestera_rxtx_handle_event() local
616 struct prestera_sdma *sdma = &sw->rxtx->sdma; prestera_sdma_switch_init() local
676 struct prestera_sdma *sdma = &sw->rxtx->sdma; prestera_sdma_switch_fini() local
687 prestera_sdma_is_ready(struct prestera_sdma *sdma) prestera_sdma_is_ready() argument
692 prestera_sdma_tx_wait(struct prestera_sdma *sdma, struct prestera_tx_ring *tx_ring) prestera_sdma_tx_wait() argument
707 prestera_sdma_tx_start(struct prestera_sdma *sdma) prestera_sdma_tx_start() argument
713 prestera_sdma_xmit(struct prestera_sdma *sdma, struct sk_buff *skb) prestera_sdma_xmit() argument
[all...]
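
prestera_sdma_buf_init() draws each descriptor from a dma_pool, which hands out small fixed-size chunks of coherent DMA memory. A hedged sketch of that allocation pattern; the pool name, size, and alignment here are illustrative:

    #include <linux/dmapool.h>

    /* Create a pool of small, aligned descriptors once at init time. */
    static struct dma_pool *example_desc_pool_create(struct device *dev)
    {
        return dma_pool_create("sdma_desc", dev, 16, 16, 0);
    }

    static void *example_desc_alloc(struct dma_pool *pool, dma_addr_t *dma)
    {
        /* GFP_DMA mirrors the flags used in prestera_sdma_buf_init();
         * *dma receives the bus address to put into the ring.
         */
        return dma_pool_alloc(pool, GFP_DMA | GFP_KERNEL, dma);
    }

Each allocation is returned with dma_pool_free() and the pool itself with dma_pool_destroy() at teardown.
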
/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/
amdgpu_sdma.c
41 for (i = 0; i < adev->sdma.num_instances; i++) in amdgpu_sdma_get_instance_from_ring()
42 if (ring == &adev->sdma.instance[i].ring || in amdgpu_sdma_get_instance_from_ring()
43 ring == &adev->sdma.instance[i].page) in amdgpu_sdma_get_instance_from_ring()
44 return &adev->sdma.instance[i]; in amdgpu_sdma_get_instance_from_ring()
54 for (i = 0; i < adev->sdma.num_instances; i++) { in amdgpu_sdma_get_index_from_ring()
55 if (ring == &adev->sdma.instance[i].ring || in amdgpu_sdma_get_index_from_ring()
56 ring == &adev->sdma.instance[i].page) { in amdgpu_sdma_get_index_from_ring()
101 if (!adev->sdma.ras_if) { in amdgpu_sdma_ras_late_init()
102 adev->sdma.ras_if = kmalloc(sizeof(struct ras_common_if), GFP_KERNEL); in amdgpu_sdma_ras_late_init()
103 if (!adev->sdma in amdgpu_sdma_ras_late_init()
[all...]
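
The first hits show amdgpu_sdma_get_instance_from_ring() almost in full: walk the instances and match the ring pointer against each instance's gfx and page rings. Reassembled from the matched lines, with the NULL fallthrough assumed:

    struct amdgpu_sdma_instance *
    amdgpu_sdma_get_instance_from_ring(struct amdgpu_ring *ring)
    {
        struct amdgpu_device *adev = ring->adev;
        int i;

        for (i = 0; i < adev->sdma.num_instances; i++)
            if (ring == &adev->sdma.instance[i].ring ||
                ring == &adev->sdma.instance[i].page)
                return &adev->sdma.instance[i];

        return NULL; /* ring does not belong to any SDMA instance */
    }
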
sdma_v4_0.c
530 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_setup_ulv()
562 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_destroy_inst_ctx()
563 release_firmware(adev->sdma.instance[i].fw); in sdma_v4_0_destroy_inst_ctx()
564 adev->sdma.instance[i].fw = NULL; in sdma_v4_0_destroy_inst_ctx()
572 memset((void*)adev->sdma.instance, 0, in sdma_v4_0_destroy_inst_ctx()
634 err = request_firmware(&adev->sdma.instance[0].fw, fw_name, adev->dev); in sdma_v4_0_init_microcode()
638 err = sdma_v4_0_init_inst_ctx(&adev->sdma.instance[0]); in sdma_v4_0_init_microcode()
642 for (i = 1; i < adev->sdma.num_instances; i++) { in sdma_v4_0_init_microcode()
646 memcpy((void*)&adev->sdma.instance[i], in sdma_v4_0_init_microcode()
647 (void*)&adev->sdma in sdma_v4_0_init_microcode()
825 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); sdma_v4_0_ring_insert_nop() local
959 struct amdgpu_ring *sdma[AMDGPU_MAX_SDMA_INSTANCES]; sdma_v4_0_gfx_stop() local
1001 struct amdgpu_ring *sdma[AMDGPU_MAX_SDMA_INSTANCES]; sdma_v4_0_page_stop() local
1735 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); sdma_v4_0_ring_pad_ib() local
2030 u32 sdma[AMDGPU_MAX_SDMA_INSTANCES]; sdma_v4_0_wait_for_idle() local
[all...]
cik_sdma.c
76 for (i = 0; i < adev->sdma.num_instances; i++) { in cik_sdma_free_microcode()
77 release_firmware(adev->sdma.instance[i].fw); in cik_sdma_free_microcode()
78 adev->sdma.instance[i].fw = NULL; in cik_sdma_free_microcode()
135 for (i = 0; i < adev->sdma.num_instances; i++) { in cik_sdma_init_microcode()
140 err = request_firmware(&adev->sdma.instance[i].fw, fw_name, adev->dev); in cik_sdma_init_microcode()
143 err = amdgpu_ucode_validate(adev->sdma.instance[i].fw); in cik_sdma_init_microcode()
148 for (i = 0; i < adev->sdma.num_instances; i++) { in cik_sdma_init_microcode()
149 release_firmware(adev->sdma.instance[i].fw); in cik_sdma_init_microcode()
150 adev->sdma.instance[i].fw = NULL; in cik_sdma_init_microcode()
203 struct amdgpu_sdma_instance *sdma in cik_sdma_ring_insert_nop() local
809 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); cik_sdma_ring_pad_ib() local
[all...]
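
cik_sdma_init_microcode() follows the classic request/validate/rollback shape: request one firmware blob per instance, validate it, and on any failure release everything requested so far. A condensed sketch of that shape; the firmware file pattern and chip_name parameter are illustrative:

    #include <linux/firmware.h>

    static int example_init_microcode(struct amdgpu_device *adev,
                                      const char *chip_name)
    {
        char fw_name[40];
        int err = 0, i;

        for (i = 0; i < adev->sdma.num_instances; i++) {
            snprintf(fw_name, sizeof(fw_name), "amdgpu/%s_sdma%d.bin",
                     chip_name, i);
            err = request_firmware(&adev->sdma.instance[i].fw, fw_name,
                                   adev->dev);
            if (err)
                break;
            err = amdgpu_ucode_validate(adev->sdma.instance[i].fw);
            if (err)
                break;
        }

        if (err) {
            /* Roll back every blob requested so far; unrequested slots
             * are NULL and release_firmware(NULL) is a no-op.
             */
            for (i = 0; i < adev->sdma.num_instances; i++) {
                release_firmware(adev->sdma.instance[i].fw);
                adev->sdma.instance[i].fw = NULL;
            }
        }
        return err;
    }
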
sdma_v2_4.c
116 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v2_4_free_microcode()
117 release_firmware(adev->sdma.instance[i].fw); in sdma_v2_4_free_microcode()
118 adev->sdma.instance[i].fw = NULL; in sdma_v2_4_free_microcode()
149 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v2_4_init_microcode()
154 err = request_firmware(&adev->sdma.instance[i].fw, fw_name, adev->dev); in sdma_v2_4_init_microcode()
157 err = amdgpu_ucode_validate(adev->sdma.instance[i].fw); in sdma_v2_4_init_microcode()
160 hdr = (const struct sdma_firmware_header_v1_0 *)adev->sdma.instance[i].fw->data; in sdma_v2_4_init_microcode()
161 adev->sdma.instance[i].fw_version = le32_to_cpu(hdr->header.ucode_version); in sdma_v2_4_init_microcode()
162 adev->sdma.instance[i].feature_version = le32_to_cpu(hdr->ucode_feature_version); in sdma_v2_4_init_microcode()
163 if (adev->sdma in sdma_v2_4_init_microcode()
231 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); sdma_v2_4_ring_insert_nop() local
748 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); sdma_v2_4_ring_pad_ib() local
[all...]
sdma_v3_0.c
253 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_free_microcode()
254 release_firmware(adev->sdma.instance[i].fw); in sdma_v3_0_free_microcode()
255 adev->sdma.instance[i].fw = NULL; in sdma_v3_0_free_microcode()
307 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_init_microcode()
312 err = request_firmware(&adev->sdma.instance[i].fw, fw_name, adev->dev); in sdma_v3_0_init_microcode()
315 err = amdgpu_ucode_validate(adev->sdma.instance[i].fw); in sdma_v3_0_init_microcode()
318 hdr = (const struct sdma_firmware_header_v1_0 *)adev->sdma.instance[i].fw->data; in sdma_v3_0_init_microcode()
319 adev->sdma.instance[i].fw_version = le32_to_cpu(hdr->header.ucode_version); in sdma_v3_0_init_microcode()
320 adev->sdma.instance[i].feature_version = le32_to_cpu(hdr->ucode_feature_version); in sdma_v3_0_init_microcode()
321 if (adev->sdma in sdma_v3_0_init_microcode()
405 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); sdma_v3_0_ring_insert_nop() local
1019 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); sdma_v3_0_ring_pad_ib() local
[all...]
sdma_v5_2.c
119 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_destroy_inst_ctx()
120 release_firmware(adev->sdma.instance[i].fw); in sdma_v5_2_destroy_inst_ctx()
121 adev->sdma.instance[i].fw = NULL; in sdma_v5_2_destroy_inst_ctx()
127 memset((void*)adev->sdma.instance, 0, in sdma_v5_2_destroy_inst_ctx()
169 err = request_firmware(&adev->sdma.instance[0].fw, fw_name, adev->dev); in sdma_v5_2_init_microcode()
173 err = sdma_v5_2_init_inst_ctx(&adev->sdma.instance[0]); in sdma_v5_2_init_microcode()
177 for (i = 1; i < adev->sdma.num_instances; i++) { in sdma_v5_2_init_microcode()
180 memcpy((void*)&adev->sdma.instance[i], in sdma_v5_2_init_microcode()
181 (void*)&adev->sdma.instance[0], in sdma_v5_2_init_microcode()
185 err = request_firmware(&adev->sdma in sdma_v5_2_init_microcode()
333 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); sdma_v5_2_ring_insert_nop() local
1068 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); sdma_v5_2_ring_pad_ib() local
[all...]
sdma_v5_0.c
229 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_init_microcode()
234 err = request_firmware(&adev->sdma.instance[i].fw, fw_name, adev->dev); in sdma_v5_0_init_microcode()
237 err = amdgpu_ucode_validate(adev->sdma.instance[i].fw); in sdma_v5_0_init_microcode()
240 hdr = (const struct sdma_firmware_header_v1_0 *)adev->sdma.instance[i].fw->data; in sdma_v5_0_init_microcode()
241 adev->sdma.instance[i].fw_version = le32_to_cpu(hdr->header.ucode_version); in sdma_v5_0_init_microcode()
242 adev->sdma.instance[i].feature_version = le32_to_cpu(hdr->ucode_feature_version); in sdma_v5_0_init_microcode()
243 if (adev->sdma.instance[i].feature_version >= 20) in sdma_v5_0_init_microcode()
244 adev->sdma.instance[i].burst_nop = true; in sdma_v5_0_init_microcode()
251 info->fw = adev->sdma.instance[i].fw; in sdma_v5_0_init_microcode()
260 for (i = 0; i < adev->sdma in sdma_v5_0_init_microcode()
384 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); sdma_v5_0_ring_insert_nop() local
1131 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); sdma_v5_0_ring_pad_ib() local
[all...]
si_dma.c
49 u32 me = (ring == &adev->sdma.instance[0].ring) ? 0 : 1; in si_dma_ring_get_wptr()
57 u32 me = (ring == &adev->sdma.instance[0].ring) ? 0 : 1; in si_dma_ring_set_wptr()
118 for (i = 0; i < adev->sdma.num_instances; i++) { in si_dma_stop()
119 ring = &adev->sdma.instance[i].ring; in si_dma_stop()
137 for (i = 0; i < adev->sdma.num_instances; i++) { in si_dma_start()
138 ring = &adev->sdma.instance[i].ring; in si_dma_start()
470 adev->sdma.num_instances = 2; in si_dma_early_init()
488 &adev->sdma.trap_irq); in si_dma_sw_init()
494 &adev->sdma.trap_irq); in si_dma_sw_init()
498 for (i = 0; i < adev->sdma in si_dma_sw_init()
[all...]
/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdgpu/
amdgpu_sdma.c
42 for (i = 0; i < adev->sdma.num_instances; i++) in amdgpu_sdma_get_instance_from_ring()
43 if (ring == &adev->sdma.instance[i].ring || in amdgpu_sdma_get_instance_from_ring()
44 ring == &adev->sdma.instance[i].page) in amdgpu_sdma_get_instance_from_ring()
45 return &adev->sdma.instance[i]; in amdgpu_sdma_get_instance_from_ring()
55 for (i = 0; i < adev->sdma.num_instances; i++) { in amdgpu_sdma_get_index_from_ring()
56 if (ring == &adev->sdma.instance[i].ring || in amdgpu_sdma_get_index_from_ring()
57 ring == &adev->sdma.instance[i].page) { in amdgpu_sdma_get_index_from_ring()
82 sdma[ring->idx].sdma_meta_data); in amdgpu_sdma_get_csa_mc_addr()
108 for (i = 0; i < adev->sdma.num_instances; i++) { in amdgpu_sdma_ras_late_init()
109 r = amdgpu_irq_get(adev, &adev->sdma in amdgpu_sdma_ras_late_init()
294 struct amdgpu_ring *sdma; amdgpu_sdma_unset_buffer_funcs_helper() local
[all...]
sdma_v4_4_2.c
34 #include "sdma/sdma_4_4_2_offset.h"
35 #include "sdma/sdma_4_4_2_sh_mask.h"
105 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_4_2_inst_init_golden_registers()
134 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_4_2_init_microcode()
288 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); in sdma_v4_4_2_ring_insert_nop() local
292 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v4_4_2_ring_insert_nop()
428 struct amdgpu_ring *sdma[AMDGPU_MAX_SDMA_INSTANCES]; in sdma_v4_4_2_inst_gfx_stop() local
433 sdma[i] = &adev->sdma in sdma_v4_4_2_inst_gfx_stop()
474 struct amdgpu_ring *sdma[AMDGPU_MAX_SDMA_INSTANCES]; sdma_v4_4_2_inst_page_stop() local
1165 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); sdma_v4_4_2_ring_pad_ib() local
1488 u32 sdma[AMDGPU_MAX_SDMA_INSTANCES]; sdma_v4_4_2_wait_for_idle() local
[all...]
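
The ring_insert_nop() hits here and in the other sdma_v* files all implement the same trick: when the instance supports burst_nop, the first NOP packet carries a count so the engine can skip the whole pad in one fetch instead of decoding each NOP. A hedged sketch; treat the exact header encoding via SDMA_PKT_NOP_HEADER_COUNT() as an assumption carried over from these drivers:

    /* Pad the ring with 'count' NOPs, using one burst NOP if supported. */
    static void example_ring_insert_nop(struct amdgpu_ring *ring,
                                        uint32_t count)
    {
        struct amdgpu_sdma_instance *sdma =
            amdgpu_sdma_get_instance_from_ring(ring);
        int i;

        for (i = 0; i < count; i++)
            if (sdma && sdma->burst_nop && (i == 0))
                /* First dword encodes how many pad dwords follow. */
                amdgpu_ring_write(ring, ring->funcs->nop |
                                  SDMA_PKT_NOP_HEADER_COUNT(count - 1));
            else
                amdgpu_ring_write(ring, ring->funcs->nop);
    }
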
sdma_v3_0.c
254 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v3_0_free_microcode()
255 amdgpu_ucode_release(&adev->sdma.instance[i].fw); in sdma_v3_0_free_microcode()
306 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_init_microcode()
311 err = amdgpu_ucode_request(adev, &adev->sdma.instance[i].fw, fw_name); in sdma_v3_0_init_microcode()
314 hdr = (const struct sdma_firmware_header_v1_0 *)adev->sdma.instance[i].fw->data; in sdma_v3_0_init_microcode()
315 adev->sdma.instance[i].fw_version = le32_to_cpu(hdr->header.ucode_version); in sdma_v3_0_init_microcode()
316 adev->sdma.instance[i].feature_version = le32_to_cpu(hdr->ucode_feature_version); in sdma_v3_0_init_microcode()
317 if (adev->sdma.instance[i].feature_version >= 20) in sdma_v3_0_init_microcode()
318 adev->sdma.instance[i].burst_nop = true; in sdma_v3_0_init_microcode()
322 info->fw = adev->sdma in sdma_v3_0_init_microcode()
399 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); sdma_v3_0_ring_insert_nop() local
1011 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); sdma_v3_0_ring_pad_ib() local
[all...]
sdma_v4_0.c
555 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_setup_ulv()
580 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_init_microcode()
737 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); in sdma_v4_0_ring_insert_nop() local
741 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v4_0_ring_insert_nop()
880 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_gfx_enable()
916 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_page_stop()
965 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_ctx_switch_enable()
982 adev->sdma.instance[i].fw_version >= 14) in sdma_v4_0_ctx_switch_enable()
1006 if (adev->sdma in sdma_v4_0_enable()
1630 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); sdma_v4_0_ring_pad_ib() local
1988 u32 sdma[AMDGPU_MAX_SDMA_INSTANCES]; sdma_v4_0_wait_for_idle() local
[all...]
cik_sdma.c
77 for (i = 0; i < adev->sdma.num_instances; i++) in cik_sdma_free_microcode()
78 amdgpu_ucode_release(&adev->sdma.instance[i].fw); in cik_sdma_free_microcode()
134 for (i = 0; i < adev->sdma.num_instances; i++) { in cik_sdma_init_microcode()
139 err = amdgpu_ucode_request(adev, &adev->sdma.instance[i].fw, fw_name); in cik_sdma_init_microcode()
146 for (i = 0; i < adev->sdma.num_instances; i++) in cik_sdma_init_microcode()
147 amdgpu_ucode_release(&adev->sdma.instance[i].fw); in cik_sdma_init_microcode()
199 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); in cik_sdma_ring_insert_nop() local
203 if (sdma && sdma->burst_nop && (i == 0)) in cik_sdma_ring_insert_nop()
313 for (i = 0; i < adev->sdma in cik_sdma_gfx_stop()
803 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); cik_sdma_ring_pad_ib() local
[all...]
sdma_v2_4.c
117 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v2_4_free_microcode()
118 amdgpu_ucode_release(&adev->sdma.instance[i].fw); in sdma_v2_4_free_microcode()
148 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v2_4_init_microcode()
153 err = amdgpu_ucode_request(adev, &adev->sdma.instance[i].fw, fw_name); in sdma_v2_4_init_microcode()
156 hdr = (const struct sdma_firmware_header_v1_0 *)adev->sdma.instance[i].fw->data; in sdma_v2_4_init_microcode()
157 adev->sdma.instance[i].fw_version = le32_to_cpu(hdr->header.ucode_version); in sdma_v2_4_init_microcode()
158 adev->sdma.instance[i].feature_version = le32_to_cpu(hdr->ucode_feature_version); in sdma_v2_4_init_microcode()
159 if (adev->sdma.instance[i].feature_version >= 20) in sdma_v2_4_init_microcode()
160 adev->sdma.instance[i].burst_nop = true; in sdma_v2_4_init_microcode()
165 info->fw = adev->sdma in sdma_v2_4_init_microcode()
225 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); sdma_v2_4_ring_insert_nop() local
740 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); sdma_v2_4_ring_pad_ib() local
[all...]
si_dma.c
49 u32 me = (ring == &adev->sdma.instance[0].ring) ? 0 : 1; in si_dma_ring_get_wptr()
57 u32 me = (ring == &adev->sdma.instance[0].ring) ? 0 : 1; in si_dma_ring_set_wptr()
120 for (i = 0; i < adev->sdma.num_instances; i++) { in si_dma_stop()
135 for (i = 0; i < adev->sdma.num_instances; i++) { in si_dma_start()
136 ring = &adev->sdma.instance[i].ring; in si_dma_start()
469 adev->sdma.num_instances = 2; in si_dma_early_init()
487 &adev->sdma.trap_irq); in si_dma_sw_init()
493 &adev->sdma.trap_irq); in si_dma_sw_init()
497 for (i = 0; i < adev->sdma.num_instances; i++) { in si_dma_sw_init()
498 ring = &adev->sdma in si_dma_sw_init()
[all...]
sdma_v5_2.c
208 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); in sdma_v5_2_ring_insert_nop() local
212 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v5_2_ring_insert_nop()
369 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_gfx_stop()
428 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_ctx_switch_enable()
467 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_enable()
495 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_gfx_resume()
496 ring = &adev->sdma.instance[i].ring; in sdma_v5_2_gfx_resume()
667 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_load_microcode()
668 if (!adev->sdma in sdma_v5_2_load_microcode()
1081 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); sdma_v5_2_ring_pad_ib() local
[all...]
sdma_v6_0.c
226 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); in sdma_v6_0_ring_insert_nop() local
230 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v6_0_ring_insert_nop()
386 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v6_0_gfx_stop()
422 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v6_0_ctxempty_int_enable()
452 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v6_0_enable()
478 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v6_0_gfx_resume()
479 ring = &adev->sdma.instance[i].ring; in sdma_v6_0_gfx_resume()
548 adev->doorbell_index.sdma_doorbell_range * adev->sdma.num_instances); in sdma_v6_0_gfx_resume()
637 if (!adev->sdma in sdma_v6_0_load_microcode()
1131 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); sdma_v6_0_ring_pad_ib() local
[all...]
sdma_v5_0.c
243 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_init_microcode()
401 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); in sdma_v5_0_ring_insert_nop() local
405 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v5_0_ring_insert_nop()
564 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_gfx_stop()
623 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_ctx_switch_enable()
665 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_enable()
692 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_gfx_resume()
693 ring = &adev->sdma.instance[i].ring; in sdma_v5_0_gfx_resume()
867 for (i = 0; i < adev->sdma in sdma_v5_0_load_microcode()
1245 struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring); sdma_v5_0_ring_pad_ib() local
[all...]
/kernel/linux/linux-5.10/arch/powerpc/platforms/52xx/
mpc52xx_pic.c
134 static struct mpc52xx_sdma __iomem *sdma; variable
269 io_be_setbit(&sdma->IntMask, l2irq); in mpc52xx_sdma_mask()
275 io_be_clrbit(&sdma->IntMask, l2irq); in mpc52xx_sdma_unmask()
281 out_be32(&sdma->IntPend, 1 << l2irq); in mpc52xx_sdma_ack()
416 sdma = of_iomap(np, 0); in mpc52xx_init_irq()
418 if (!sdma) in mpc52xx_init_irq()
425 out_be32(&sdma->IntPend, 0xffffffff); /* 1 means clear pending */ in mpc52xx_init_irq()
426 out_be32(&sdma->IntMask, 0xffffffff); /* 1 means disabled */ in mpc52xx_init_irq()
507 status = in_be32(&sdma->IntPend); in mpc52xx_get_irq()
/kernel/linux/linux-6.6/arch/powerpc/platforms/52xx/
mpc52xx_pic.c
135 static struct mpc52xx_sdma __iomem *sdma; variable
270 io_be_setbit(&sdma->IntMask, l2irq); in mpc52xx_sdma_mask()
276 io_be_clrbit(&sdma->IntMask, l2irq); in mpc52xx_sdma_unmask()
282 out_be32(&sdma->IntPend, 1 << l2irq); in mpc52xx_sdma_ack()
417 sdma = of_iomap(np, 0); in mpc52xx_init_irq()
419 if (!sdma) in mpc52xx_init_irq()
426 out_be32(&sdma->IntPend, 0xffffffff); /* 1 means clear pending */ in mpc52xx_init_irq()
427 out_be32(&sdma->IntMask, 0xffffffff); /* 1 means disabled */ in mpc52xx_init_irq()
508 status = in_be32(&sdma->IntPend); in mpc52xx_get_irq()
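
On the MPC52xx the SDMA block doubles as a secondary interrupt controller, and the hits show mask, unmask, and ack as single big-endian register accesses: set or clear a bit in IntMask, and write a one-hot value to IntPend (write-1-to-clear). io_be_setbit()/io_be_clrbit() are small read-modify-write helpers over in_be32()/out_be32(); their bodies below are an assumption, while the register usage comes from the hits:

    #include <asm/io.h>

    /* Likely shape of the bit helpers used above (assumed, not copied). */
    static inline void io_be_setbit(u32 __iomem *addr, int bitno)
    {
        out_be32(addr, in_be32(addr) | (1 << bitno));
    }

    static inline void io_be_clrbit(u32 __iomem *addr, int bitno)
    {
        out_be32(addr, in_be32(addr) & ~(1 << bitno));
    }

    /* From the hits: acking level-2 IRQ 'l2irq' clears its pending bit. */
    static void example_sdma_ack(u32 __iomem *intpend, int l2irq)
    {
        out_be32(intpend, 1 << l2irq);
    }
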
