
Searched refs:xor_srcs (Results 1 - 9 of 9) sorted by relevance

/kernel/linux/linux-5.10/drivers/dma/ioat/
init.c
787 struct page *xor_srcs[IOAT_NUM_SRC_TEST]; in ioat_xor_val_self_test() local
810 xor_srcs[src_idx] = alloc_page(GFP_KERNEL); in ioat_xor_val_self_test()
811 if (!xor_srcs[src_idx]) { in ioat_xor_val_self_test()
813 __free_page(xor_srcs[src_idx]); in ioat_xor_val_self_test()
821 __free_page(xor_srcs[src_idx]); in ioat_xor_val_self_test()
827 u8 *ptr = page_address(xor_srcs[src_idx]); in ioat_xor_val_self_test()
858 dma_srcs[i] = dma_map_page(dev, xor_srcs[i], 0, PAGE_SIZE, in ioat_xor_val_self_test()
921 xor_val_srcs[i] = xor_srcs[i]; in ioat_xor_val_self_test()
1043 __free_page(xor_srcs[src_idx]); in ioat_xor_val_self_test()
/kernel/linux/linux-6.6/drivers/dma/ioat/
init.c
787 struct page *xor_srcs[IOAT_NUM_SRC_TEST]; in ioat_xor_val_self_test() local
810 xor_srcs[src_idx] = alloc_page(GFP_KERNEL); in ioat_xor_val_self_test()
811 if (!xor_srcs[src_idx]) { in ioat_xor_val_self_test()
813 __free_page(xor_srcs[src_idx]); in ioat_xor_val_self_test()
821 __free_page(xor_srcs[src_idx]); in ioat_xor_val_self_test()
827 u8 *ptr = page_address(xor_srcs[src_idx]); in ioat_xor_val_self_test()
858 dma_srcs[i] = dma_map_page(dev, xor_srcs[i], 0, PAGE_SIZE, in ioat_xor_val_self_test()
921 xor_val_srcs[i] = xor_srcs[i]; in ioat_xor_val_self_test()
1043 __free_page(xor_srcs[src_idx]); in ioat_xor_val_self_test()
/kernel/linux/linux-5.10/drivers/dma/
mv_xor.c
876 struct page *xor_srcs[MV_XOR_NUM_SRC_TEST]; in mv_chan_xor_self_test() local
889 xor_srcs[src_idx] = alloc_page(GFP_KERNEL); in mv_chan_xor_self_test()
890 if (!xor_srcs[src_idx]) { in mv_chan_xor_self_test()
892 __free_page(xor_srcs[src_idx]); in mv_chan_xor_self_test()
900 __free_page(xor_srcs[src_idx]); in mv_chan_xor_self_test()
906 u8 *ptr = page_address(xor_srcs[src_idx]); in mv_chan_xor_self_test()
934 unmap->addr[i] = dma_map_page(dma_chan->device->dev, xor_srcs[i], in mv_chan_xor_self_test()
1004 __free_page(xor_srcs[src_idx]); in mv_chan_xor_self_test()
iop-adma.c
910 struct page *xor_srcs[IOP_ADMA_NUM_SRC_TEST]; in iop_adma_xor_val_self_test() local
926 xor_srcs[src_idx] = alloc_page(GFP_KERNEL); in iop_adma_xor_val_self_test()
927 if (!xor_srcs[src_idx]) { in iop_adma_xor_val_self_test()
929 __free_page(xor_srcs[src_idx]); in iop_adma_xor_val_self_test()
937 __free_page(xor_srcs[src_idx]); in iop_adma_xor_val_self_test()
943 u8 *ptr = page_address(xor_srcs[src_idx]); in iop_adma_xor_val_self_test()
968 dma_srcs[i] = dma_map_page(dma_chan->device->dev, xor_srcs[i], in iop_adma_xor_val_self_test()
1007 zero_sum_srcs[i] = xor_srcs[i]; in iop_adma_xor_val_self_test()
1073 __free_page(xor_srcs[src_idx]); in iop_adma_xor_val_self_test()
/kernel/linux/linux-6.6/drivers/dma/
mv_xor.c
876 struct page *xor_srcs[MV_XOR_NUM_SRC_TEST]; in mv_chan_xor_self_test() local
889 xor_srcs[src_idx] = alloc_page(GFP_KERNEL); in mv_chan_xor_self_test()
890 if (!xor_srcs[src_idx]) { in mv_chan_xor_self_test()
892 __free_page(xor_srcs[src_idx]); in mv_chan_xor_self_test()
900 __free_page(xor_srcs[src_idx]); in mv_chan_xor_self_test()
906 u8 *ptr = page_address(xor_srcs[src_idx]); in mv_chan_xor_self_test()
934 unmap->addr[i] = dma_map_page(dma_chan->device->dev, xor_srcs[i], in mv_chan_xor_self_test()
1004 __free_page(xor_srcs[src_idx]); in mv_chan_xor_self_test()
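
Note: all three DMA-driver hits above (ioat, mv_xor, iop-adma) use xor_srcs the same way in their XOR self-tests: allocate one page per source, fill each page through its kernel mapping, then map the pages for the device before submitting the XOR descriptor. Below is a minimal sketch of that shared setup, not the drivers' code; the NUM_XOR_SRCS constant and xor_selftest_setup() name are hypothetical (the real tests use IOAT_NUM_SRC_TEST / MV_XOR_NUM_SRC_TEST / IOP_ADMA_NUM_SRC_TEST and also check dma_mapping_error()).

#include <linux/types.h>
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/mm.h>
#include <linux/dma-mapping.h>

#define NUM_XOR_SRCS 4  /* hypothetical; stands in for the drivers' *_NUM_SRC_TEST */

/* Hypothetical helper mirroring the shared self-test setup in the hits above. */
static int xor_selftest_setup(struct device *dev, struct page **xor_srcs,
                              dma_addr_t *dma_srcs)
{
        int i, j;

        /* One page per XOR source; unwind the allocations on failure. */
        for (i = 0; i < NUM_XOR_SRCS; i++) {
                xor_srcs[i] = alloc_page(GFP_KERNEL);
                if (!xor_srcs[i]) {
                        while (--i >= 0)
                                __free_page(xor_srcs[i]);
                        return -ENOMEM;
                }
        }

        /* Give each source a distinct bit pattern via its kernel mapping. */
        for (i = 0; i < NUM_XOR_SRCS; i++) {
                u8 *ptr = page_address(xor_srcs[i]);

                for (j = 0; j < PAGE_SIZE; j++)
                        ptr[j] = 1 << i;
        }

        /* Map every source page for the DMA engine. */
        for (i = 0; i < NUM_XOR_SRCS; i++)
                dma_srcs[i] = dma_map_page(dev, xor_srcs[i], 0, PAGE_SIZE,
                                           DMA_TO_DEVICE);

        return 0;
}

The __free_page() hits at the end of each self-test (1004, 1043, 1073 above) are the matching teardown of this page array.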
/kernel/linux/linux-5.10/drivers/md/
raid5.c
1499 struct page **xor_srcs = to_addr_page(percpu, 0); in ops_run_compute5() local
1519 xor_srcs[count++] = sh->dev[i].page; in ops_run_compute5()
1528 tx = async_memcpy(xor_dest, xor_srcs[0], off_dest, off_srcs[0], in ops_run_compute5()
1531 tx = async_xor_offs(xor_dest, off_dest, xor_srcs, off_srcs, count, in ops_run_compute5()
1796 struct page **xor_srcs = to_addr_page(percpu, 0); in ops_run_prexor5() local
1803 struct page *xor_dest = xor_srcs[count++] = sh->dev[pd_idx].page; in ops_run_prexor5()
1818 xor_srcs[count++] = dev->orig_page; in ops_run_prexor5()
1821 xor_srcs[count++] = dev->page; in ops_run_prexor5()
1827 tx = async_xor_offs(xor_dest, off_dest, xor_srcs, off_srcs, count, in ops_run_prexor5()
1979 struct page **xor_srcs; in ops_run_reconstruct5() local
2154 struct page **xor_srcs = to_addr_page(percpu, 0); in ops_run_check_p() local
[all...]
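
Note: in the raid5.c hits, xor_srcs is a per-CPU scratch array of page pointers; ops_run_compute5() and ops_run_prexor5() gather the relevant stripe pages (with matching per-page offsets in off_srcs) and hand the whole list to async_xor_offs(). A sketch of that call shape follows, with the stripe-head bookkeeping and completion callback left out; the function name and parameters here are illustrative, not the driver's.

#include <linux/async_tx.h>
#include <linux/mm.h>

/*
 * Illustrative only: XOR 'count' source pages, each read at its own offset,
 * into xor_dest at off_dest, roughly the call shape ops_run_compute5() uses.
 */
static struct dma_async_tx_descriptor *
xor_pages_into_dest(struct page *xor_dest, unsigned int off_dest,
                    struct page **xor_srcs, unsigned int *off_srcs,
                    int count, size_t len, addr_conv_t *scribble)
{
        struct async_submit_ctl submit;

        /*
         * ASYNC_TX_XOR_ZERO_DST: the destination is not one of the sources,
         * so the result is the plain XOR of the listed source pages.
         */
        init_async_submit(&submit, ASYNC_TX_XOR_ZERO_DST, NULL, NULL, NULL,
                          scribble);
        return async_xor_offs(xor_dest, off_dest, xor_srcs, off_srcs, count,
                              len, &submit);
}

When only a single source remains, ops_run_compute5() falls back to async_memcpy() instead, which is the 1528/1606 hit above.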
raid5-ppl.c
720 struct page *xor_srcs[] = { page1, page2 }; in ppl_xor() local
724 tx = async_xor(page1, xor_srcs, 0, 2, size, &submit); in ppl_xor()
/kernel/linux/linux-6.6/drivers/md/
raid5.c
1577 struct page **xor_srcs = to_addr_page(percpu, 0); in ops_run_compute5() local
1597 xor_srcs[count++] = sh->dev[i].page; in ops_run_compute5()
1606 tx = async_memcpy(xor_dest, xor_srcs[0], off_dest, off_srcs[0], in ops_run_compute5()
1609 tx = async_xor_offs(xor_dest, off_dest, xor_srcs, off_srcs, count, in ops_run_compute5()
1874 struct page **xor_srcs = to_addr_page(percpu, 0); in ops_run_prexor5() local
1881 struct page *xor_dest = xor_srcs[count++] = sh->dev[pd_idx].page; in ops_run_prexor5()
1896 xor_srcs[count++] = dev->orig_page; in ops_run_prexor5()
1899 xor_srcs[count++] = dev->page; in ops_run_prexor5()
1905 tx = async_xor_offs(xor_dest, off_dest, xor_srcs, off_srcs, count, in ops_run_prexor5()
2057 struct page **xor_srcs; in ops_run_reconstruct5() local
2232 struct page **xor_srcs = to_addr_page(percpu, 0); in ops_run_check_p() local
[all...]
raid5-ppl.c
711 struct page *xor_srcs[] = { page1, page2 }; in ppl_xor() local
715 tx = async_xor(page1, xor_srcs, 0, 2, size, &submit); in ppl_xor()
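
Note: the raid5-ppl.c hit is the simplest user of the symbol; ppl_xor() builds a two-entry xor_srcs array and XORs page2 into page1 in place. The sketch below only illustrates that async_xor() call shape; the function name xor_two_pages() is made up for the example.

#include <linux/async_tx.h>
#include <linux/mm.h>

/* Illustrative stand-in for the ppl_xor() shape: page1 ^= page2 over 'size' bytes. */
static void xor_two_pages(int size, struct page *page1, struct page *page2)
{
        struct async_submit_ctl submit;
        struct dma_async_tx_descriptor *tx;
        struct page *xor_srcs[] = { page1, page2 };

        /*
         * ASYNC_TX_XOR_DROP_DST: the destination (page1) is also the first
         * entry in the source list, so the XOR accumulates into it.
         * ASYNC_TX_ACK acks the descriptor immediately, since nothing else
         * will chain off it.
         */
        init_async_submit(&submit, ASYNC_TX_ACK | ASYNC_TX_XOR_DROP_DST,
                          NULL, NULL, NULL, NULL);
        tx = async_xor(page1, xor_srcs, 0, 2, size, &submit);

        /* Wait for the descriptor (or its synchronous fallback) to finish. */
        async_tx_quiesce(&tx);
}

If no XOR-capable DMA channel is available, async_xor() falls back to a synchronous CPU XOR, so the same call works either way.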
