/kernel/linux/linux-5.10/fs/nilfs2/page.c
    103  struct page *spage = sbh->b_page, *dpage = dbh->b_page;  in nilfs_copy_buffer()  [local]
    107  kaddr1 = kmap_atomic(dpage);  in nilfs_copy_buffer()
    124  SetPageUptodate(dpage);  in nilfs_copy_buffer()
    126  ClearPageUptodate(dpage);  in nilfs_copy_buffer()
    128  SetPageMappedToDisk(dpage);  in nilfs_copy_buffer()
    130  ClearPageMappedToDisk(dpage);  in nilfs_copy_buffer()
    254  struct page *page = pvec.pages[i], *dpage;  in nilfs_copy_dirty_pages()  [local]
    260  dpage = grab_cache_page(dmap, page->index);  in nilfs_copy_dirty_pages()
    261  if (unlikely(!dpage)) {  in nilfs_copy_dirty_pages()
    271  nilfs_copy_page(dpage, pag...  in nilfs_copy_dirty_pages()
    308  struct page *page = pvec.pages[i], *dpage;  in nilfs_copy_back_pages()  [local]
    ...  (further matches omitted)
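
The nilfs_copy_buffer() hits above are the classic pattern of copying one page's contents into another through short-lived kernel mappings and then propagating the relevant page flags to the destination. A minimal sketch of that pattern follows; the helper name is invented, and it copies a whole page where nilfs actually copies a single buffer within the page:

#include <linux/highmem.h>
#include <linux/mm.h>
#include <linux/page-flags.h>
#include <linux/string.h>

/* Hypothetical helper: copy spage into dpage and mirror its uptodate state. */
static void copy_page_and_flags(struct page *dpage, struct page *spage)
{
	void *dst = kmap_atomic(dpage);
	void *src = kmap_atomic(spage);

	memcpy(dst, src, PAGE_SIZE);

	/* Unmap in reverse order of mapping (kmap_atomic is stack-like). */
	kunmap_atomic(src);
	kunmap_atomic(dst);

	if (PageUptodate(spage))
		SetPageUptodate(dpage);
	else
		ClearPageUptodate(dpage);
	flush_dcache_page(dpage);
}
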
/kernel/linux/linux-6.6/lib/test_hmm.c
     41  * For device_private pages, dpage is just a dummy struct page
    602  struct page *dpage = NULL;  in dmirror_devmem_alloc_page()  [local]
    608  * For ZONE_DEVICE coherent type we use the actual dpage to store the...  in dmirror_devmem_alloc_page()
    619  dpage = mdevice->free_pages;  in dmirror_devmem_alloc_page()
    620  mdevice->free_pages = dpage->zone_device_data;  in dmirror_devmem_alloc_page()
    625  if (dmirror_allocate_chunk(mdevice, &dpage))  in dmirror_devmem_alloc_page()
    629  zone_device_page_init(dpage);  in dmirror_devmem_alloc_page()
    630  dpage->zone_device_data = rpage;  in dmirror_devmem_alloc_page()
    631  return dpage;  in dmirror_devmem_alloc_page()
    650  struct page *dpage;  in dmirror_migrate_alloc_and_copy()  [local]
    753  struct page *dpage;  in dmirror_migrate_finalize_and_map()  [local]
    860  struct page *dpage, *spage;  in dmirror_devmem_fault_alloc_and_copy()  [local]
   1234  struct page *dpage, *spage;  in dmirror_device_evict_chunk()  [local]
    ...  (further matches omitted)
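
dmirror_devmem_alloc_page() in the 6.6 test_hmm driver shows the usual ZONE_DEVICE allocation flow: pop a device page off a driver-private free list, reinitialise it with zone_device_page_init(), and stash the backing system page in page->zone_device_data. A rough sketch of that flow under invented names (struct my_devmem and my_devmem_alloc_page are illustrative; the fallback path that allocates a new chunk when the free list is empty is omitted):

#include <linux/memremap.h>
#include <linux/mm.h>
#include <linux/spinlock.h>

/* Illustrative driver-private pool of device pages, linked via zone_device_data. */
struct my_devmem {
	spinlock_t lock;
	struct page *free_pages;
};

static struct page *my_devmem_alloc_page(struct my_devmem *mdev,
					  struct page *rpage)
{
	struct page *dpage = NULL;

	spin_lock(&mdev->lock);
	if (mdev->free_pages) {
		dpage = mdev->free_pages;
		mdev->free_pages = dpage->zone_device_data;
	}
	spin_unlock(&mdev->lock);

	if (!dpage)
		return NULL;

	zone_device_page_init(dpage);		/* fresh refcount for a ZONE_DEVICE page */
	dpage->zone_device_data = rpage;	/* remember the backing system page */
	return dpage;
}
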
/kernel/linux/linux-6.6/drivers/gpu/drm/nouveau/nouveau_dmem.c
    142  struct page *dpage, dma_addr_t *dma_addr)  in nouveau_dmem_copy_one()
    146  lock_page(dpage);  in nouveau_dmem_copy_one()
    148  *dma_addr = dma_map_page(dev, dpage, 0, PAGE_SIZE, DMA_BIDIRECTIONAL);  in nouveau_dmem_copy_one()
    167  struct page *spage, *dpage;  in nouveau_dmem_migrate_to_ram()  [local]
    196  dpage = alloc_page_vma(GFP_HIGHUSER, vmf->vma, vmf->address);  in nouveau_dmem_migrate_to_ram()
    197  if (!dpage)  in nouveau_dmem_migrate_to_ram()
    200  dst = migrate_pfn(page_to_pfn(dpage));  in nouveau_dmem_migrate_to_ram()
    205  ret = nouveau_dmem_copy_one(drm, spage, dpage, &dma_addr);  in nouveau_dmem_migrate_to_ram()
    390  struct page *dpage;  in nouveau_dmem_evict_chunk()  [local]
    397  dpage...  in nouveau_dmem_evict_chunk()
    141  nouveau_dmem_copy_one(struct nouveau_drm *drm, struct page *spage, struct page *dpage, dma_addr_t *dma_addr)  in nouveau_dmem_copy_one()  [argument]
    620  struct page *dpage, *spage;  in nouveau_dmem_migrate_copy_one()  [local]
    ...  (further matches omitted)
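
nouveau_dmem_migrate_to_ram() follows the standard migrate_vma fault path: allocate a destination system page against the faulting VMA, encode it as a migrate PFN for args->dst, and DMA-map it so the GPU copy engine can fill it. The sketch below isolates that step; the helper name is invented and error handling is reduced to the bare minimum:

#include <linux/dma-mapping.h>
#include <linux/gfp.h>
#include <linux/migrate.h>
#include <linux/mm.h>
#include <linux/pagemap.h>

/* Illustrative only: prepare one destination page for a device-to-RAM fault. */
static unsigned long prep_dst_page(struct device *dev, struct vm_fault *vmf,
				   dma_addr_t *dma_addr)
{
	struct page *dpage;

	dpage = alloc_page_vma(GFP_HIGHUSER, vmf->vma, vmf->address);
	if (!dpage)
		return 0;

	lock_page(dpage);
	*dma_addr = dma_map_page(dev, dpage, 0, PAGE_SIZE, DMA_BIDIRECTIONAL);
	if (dma_mapping_error(dev, *dma_addr)) {
		unlock_page(dpage);
		__free_page(dpage);
		return 0;
	}

	return migrate_pfn(page_to_pfn(dpage));	/* value for args->dst[0] */
}
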
/kernel/linux/linux-5.10/arch/powerpc/kvm/book3s_hv_uvmem.c
    513  struct page *dpage, *spage;  in __kvmppc_svm_page_out()  [local]
    543  dpage = alloc_page_vma(GFP_HIGHUSER, vma, start);  in __kvmppc_svm_page_out()
    544  if (!dpage) {  in __kvmppc_svm_page_out()
    549  lock_page(dpage);  in __kvmppc_svm_page_out()
    551  pfn = page_to_pfn(dpage);  in __kvmppc_svm_page_out()
    567  unlock_page(dpage);  in __kvmppc_svm_page_out()
    568  __free_page(dpage);  in __kvmppc_svm_page_out()
    690  struct page *dpage = NULL;  in kvmppc_uvmem_get_page()  [local]
    717  dpage = pfn_to_page(uvmem_pfn);  in kvmppc_uvmem_get_page()
    718  dpage...  in kvmppc_uvmem_get_page()
    744  struct page *dpage;  in kvmppc_svm_page_in()  [local]
    ...  (further matches omitted)
/kernel/linux/linux-5.10/lib/test_hmm.c
    539  struct page *dpage = NULL;  in dmirror_devmem_alloc_page()  [local]
    553  dpage = mdevice->free_pages;  in dmirror_devmem_alloc_page()
    554  mdevice->free_pages = dpage->zone_device_data;  in dmirror_devmem_alloc_page()
    559  if (!dmirror_allocate_chunk(mdevice, &dpage))  in dmirror_devmem_alloc_page()
    563  dpage->zone_device_data = rpage;  in dmirror_devmem_alloc_page()
    564  get_page(dpage);  in dmirror_devmem_alloc_page()
    565  lock_page(dpage);  in dmirror_devmem_alloc_page()
    566  return dpage;  in dmirror_devmem_alloc_page()
    584  struct page *dpage;  in dmirror_migrate_alloc_and_copy()  [local]
    596  dpage...  in dmirror_migrate_alloc_and_copy()
    636  struct page *dpage;  in dmirror_migrate_finalize_and_map()  [local]
   1028  struct page *dpage, *spage;  in dmirror_devmem_fault_alloc_and_copy()  [local]
    ...  (further matches omitted)
/kernel/linux/linux-6.6/arch/powerpc/kvm/book3s_hv_uvmem.c
    520  struct page *dpage, *spage;  in __kvmppc_svm_page_out()  [local]
    550  dpage = alloc_page_vma(GFP_HIGHUSER, vma, start);  in __kvmppc_svm_page_out()
    551  if (!dpage) {  in __kvmppc_svm_page_out()
    556  lock_page(dpage);  in __kvmppc_svm_page_out()
    558  pfn = page_to_pfn(dpage);  in __kvmppc_svm_page_out()
    574  unlock_page(dpage);  in __kvmppc_svm_page_out()
    575  __free_page(dpage);  in __kvmppc_svm_page_out()
    697  struct page *dpage = NULL;  in kvmppc_uvmem_get_page()  [local]
    724  dpage = pfn_to_page(uvmem_pfn);  in kvmppc_uvmem_get_page()
    725  dpage...  in kvmppc_uvmem_get_page()
    750  struct page *dpage;  in kvmppc_svm_page_in()  [local]
    ...  (further matches omitted)
/kernel/linux/linux-5.10/fs/f2fs/acl.c
    168  struct page *dpage)  in __f2fs_get_acl()
    178  retval = f2fs_getxattr(inode, name_index, "", NULL, 0, dpage);  in __f2fs_get_acl()
    184  retval, dpage);  in __f2fs_get_acl()
    334  struct page *dpage)  in f2fs_acl_create()
    346  p = __f2fs_get_acl(dir, ACL_TYPE_DEFAULT, dpage);  in f2fs_acl_create()
    384  struct page *dpage)  in f2fs_init_acl()
    389  error = f2fs_acl_create(dir, &inode->i_mode, &default_acl, &acl, dpage);  in f2fs_init_acl()
    167  __f2fs_get_acl(struct inode *inode, int type, struct page *dpage)  in __f2fs_get_acl()  [argument]
    332  f2fs_acl_create(struct inode *dir, umode_t *mode, struct posix_acl **default_acl, struct posix_acl **acl, struct page *dpage)  in f2fs_acl_create()  [argument]
    383  f2fs_init_acl(struct inode *inode, struct inode *dir, struct page *ipage, struct page *dpage)  in f2fs_init_acl()  [argument]

/kernel/linux/linux-5.10/fs/f2fs/acl.h
     45  struct page *ipage, struct page *dpage)  in f2fs_init_acl()
     44  f2fs_init_acl(struct inode *inode, struct inode *dir, struct page *ipage, struct page *dpage)  in f2fs_init_acl()  [argument]

/kernel/linux/linux-5.10/fs/f2fs/xattr.h
    148  size_t buffer_size, struct page *dpage)  in f2fs_getxattr()
    146  f2fs_getxattr(struct inode *inode, int index, const char *name, void *buffer, size_t buffer_size, struct page *dpage)  in f2fs_getxattr()  [argument]

/kernel/linux/linux-5.10/fs/f2fs/dir.c
    503  const struct f2fs_filename *fname, struct page *dpage)  in f2fs_init_inode_metadata()
    524  err = f2fs_init_acl(inode, dir, page, dpage);  in f2fs_init_inode_metadata()
    502  f2fs_init_inode_metadata(struct inode *inode, struct inode *dir, const struct f2fs_filename *fname, struct page *dpage)  in f2fs_init_inode_metadata()  [argument]

/kernel/linux/linux-5.10/drivers/gpu/drm/nouveau/nouveau_dmem.c
    145  struct page *dpage, *spage;  in nouveau_dmem_fault_copy_one()  [local]
    152  dpage = alloc_page_vma(GFP_HIGHUSER, vmf->vma, vmf->address);  in nouveau_dmem_fault_copy_one()
    153  if (!dpage)  in nouveau_dmem_fault_copy_one()
    155  lock_page(dpage);  in nouveau_dmem_fault_copy_one()
    157  *dma_addr = dma_map_page(dev, dpage, 0, PAGE_SIZE, DMA_BIDIRECTIONAL);  in nouveau_dmem_fault_copy_one()
    169  args->dst[0] = migrate_pfn(page_to_pfn(dpage)) | MIGRATE_PFN_LOCKED;  in nouveau_dmem_fault_copy_one()
    176  __free_page(dpage);  in nouveau_dmem_fault_copy_one()
    573  struct page *dpage, *spage;  in nouveau_dmem_migrate_copy_one()  [local]
    580  dpage = nouveau_dmem_page_alloc_locked(drm);  in nouveau_dmem_migrate_copy_one()
    581  if (!dpage)  in nouveau_dmem_migrate_copy_one()
    ...  (further matches omitted)
/kernel/linux/linux-5.10/drivers/dma/nbpfaxi.c
    693  struct nbpf_desc_page *dpage = (void *)get_zeroed_page(GFP_KERNEL | GFP_DMA);  in nbpf_desc_page_alloc()  [local]
    702  if (!dpage)  in nbpf_desc_page_alloc()
    706  __func__, NBPF_DESCS_PER_PAGE, NBPF_SEGMENTS_PER_PAGE, sizeof(*dpage));  in nbpf_desc_page_alloc()
    708  for (i = 0, ldesc = dpage->ldesc, hwdesc = dpage->hwdesc;  in nbpf_desc_page_alloc()
    709  i < ARRAY_SIZE(dpage->ldesc);  in nbpf_desc_page_alloc()
    720  for (i = 0, desc = dpage->desc;  in nbpf_desc_page_alloc()
    721  i < ARRAY_SIZE(dpage->desc);  in nbpf_desc_page_alloc()
    737  list_add(&dpage->node, &chan->desc_page);  in nbpf_desc_page_alloc()
    740  return ARRAY_SIZE(dpage...  in nbpf_desc_page_alloc()
   1066  struct nbpf_desc_page *dpage, *tmp;  in nbpf_free_chan_resources()  [local]
    ...  (further matches omitted)
/kernel/linux/linux-6.6/drivers/dma/nbpfaxi.c
    692  struct nbpf_desc_page *dpage = (void *)get_zeroed_page(GFP_KERNEL | GFP_DMA);  in nbpf_desc_page_alloc()  [local]
    701  if (!dpage)  in nbpf_desc_page_alloc()
    705  __func__, NBPF_DESCS_PER_PAGE, NBPF_SEGMENTS_PER_PAGE, sizeof(*dpage));  in nbpf_desc_page_alloc()
    707  for (i = 0, ldesc = dpage->ldesc, hwdesc = dpage->hwdesc;  in nbpf_desc_page_alloc()
    708  i < ARRAY_SIZE(dpage->ldesc);  in nbpf_desc_page_alloc()
    719  for (i = 0, desc = dpage->desc;  in nbpf_desc_page_alloc()
    720  i < ARRAY_SIZE(dpage->desc);  in nbpf_desc_page_alloc()
    736  list_add(&dpage->node, &chan->desc_page);  in nbpf_desc_page_alloc()
    739  return ARRAY_SIZE(dpage...  in nbpf_desc_page_alloc()
   1065  struct nbpf_desc_page *dpage, *tmp;  in nbpf_free_chan_resources()  [local]
    ...  (further matches omitted)
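
Both copies of nbpfaxi.c allocate a zeroed, DMA-capable page with get_zeroed_page(), carve it into descriptor arrays, put every descriptor on the channel's free list, and keep the page itself on a per-channel list so nbpf_free_chan_resources() can return it later. A simplified sketch of that pooling idea, with a stand-in descriptor layout rather than the driver's real one:

#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/kernel.h>
#include <linux/list.h>

/* Stand-in layout: one page holding a small array of descriptors. */
struct my_desc {
	struct list_head node;
};

struct my_desc_page {
	struct list_head node;		/* linked into the channel's page list */
	struct my_desc desc[32];
};

static int my_desc_page_alloc(struct list_head *chan_pages,
			      struct list_head *free_descs)
{
	struct my_desc_page *dpage =
		(void *)get_zeroed_page(GFP_KERNEL | GFP_DMA);
	int i;

	if (!dpage)
		return -ENOMEM;

	/* Every descriptor in the fresh page starts out on the free list. */
	for (i = 0; i < ARRAY_SIZE(dpage->desc); i++)
		list_add_tail(&dpage->desc[i].node, free_descs);

	/* Remember the page itself so it can be freed on channel teardown. */
	list_add(&dpage->node, chan_pages);
	return ARRAY_SIZE(dpage->desc);
}
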
/kernel/linux/linux-6.6/fs/f2fs/acl.c
    169  struct page *dpage)  in __f2fs_get_acl()
    179  retval = f2fs_getxattr(inode, name_index, "", NULL, 0, dpage);  in __f2fs_get_acl()
    185  retval, dpage);  in __f2fs_get_acl()
    365  struct page *dpage)  in f2fs_acl_create()
    377  p = __f2fs_get_acl(dir, ACL_TYPE_DEFAULT, dpage);  in f2fs_acl_create()
    415  struct page *dpage)  in f2fs_init_acl()
    420  error = f2fs_acl_create(dir, &inode->i_mode, &default_acl, &acl, dpage);  in f2fs_init_acl()
    168  __f2fs_get_acl(struct inode *inode, int type, struct page *dpage)  in __f2fs_get_acl()  [argument]
    363  f2fs_acl_create(struct inode *dir, umode_t *mode, struct posix_acl **default_acl, struct posix_acl **acl, struct page *dpage)  in f2fs_acl_create()  [argument]
    414  f2fs_init_acl(struct inode *inode, struct inode *dir, struct page *ipage, struct page *dpage)  in f2fs_init_acl()  [argument]

/kernel/linux/linux-6.6/fs/f2fs/acl.h
     46  struct page *ipage, struct page *dpage)  in f2fs_init_acl()
     45  f2fs_init_acl(struct inode *inode, struct inode *dir, struct page *ipage, struct page *dpage)  in f2fs_init_acl()  [argument]

/kernel/linux/linux-6.6/fs/f2fs/xattr.h
    149  size_t buffer_size, struct page *dpage)  in f2fs_getxattr()
    147  f2fs_getxattr(struct inode *inode, int index, const char *name, void *buffer, size_t buffer_size, struct page *dpage)  in f2fs_getxattr()  [argument]

/kernel/linux/linux-6.6/fs/f2fs/dir.c
    537  const struct f2fs_filename *fname, struct page *dpage)  in f2fs_init_inode_metadata()
    558  err = f2fs_init_acl(inode, dir, page, dpage);  in f2fs_init_inode_metadata()
    536  f2fs_init_inode_metadata(struct inode *inode, struct inode *dir, const struct f2fs_filename *fname, struct page *dpage)  in f2fs_init_inode_metadata()  [argument]

/kernel/linux/linux-6.6/fs/nilfs2/page.c
    103  struct page *spage = sbh->b_page, *dpage = dbh->b_page;  in nilfs_copy_buffer()  [local]
    107  kaddr1 = kmap_atomic(dpage);  in nilfs_copy_buffer()
    124  SetPageUptodate(dpage);  in nilfs_copy_buffer()
    126  ClearPageUptodate(dpage);  in nilfs_copy_buffer()
    128  SetPageMappedToDisk(dpage);  in nilfs_copy_buffer()
    130  ClearPageMappedToDisk(dpage);  in nilfs_copy_buffer()

/kernel/linux/linux-6.6/mm/migrate_device.c
    930  struct page *dpage;  in migrate_device_coherent_page()  [local]
    946  dpage = alloc_page(GFP_USER | __GFP_NOWARN);  in migrate_device_coherent_page()
    947  if (dpage) {  in migrate_device_coherent_page()
    948  lock_page(dpage);  in migrate_device_coherent_page()
    949  dst_pfn = migrate_pfn(page_to_pfn(dpage));  in migrate_device_coherent_page()
    954  copy_highpage(dpage, page);  in migrate_device_coherent_page()

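
migrate_device_coherent_page() shows the CPU-side replacement of a device-coherent page: allocate a plain system page, lock it, advertise it as the migration destination, and copy the old contents across with copy_highpage(). A hedged sketch of just that copy step (the surrounding migrate_vma setup and collection calls are omitted, and the helper name is invented):

#include <linux/gfp.h>
#include <linux/highmem.h>
#include <linux/migrate.h>
#include <linux/mm.h>
#include <linux/pagemap.h>

/*
 * Illustrative only: allocate a system page to replace `src` and return the
 * encoded destination PFN (0 on allocation failure).
 */
static unsigned long copy_to_system_page(struct page *src, struct page **out)
{
	struct page *dpage = alloc_page(GFP_USER | __GFP_NOWARN);

	if (!dpage)
		return 0;

	lock_page(dpage);
	copy_highpage(dpage, src);	/* destination first, then source */
	*out = dpage;
	return migrate_pfn(page_to_pfn(dpage));
}
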
/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdkfd/kfd_migrate.c
    573  struct page *dpage;  in svm_migrate_copy_to_ram()  [local]
    615  dpage = svm_migrate_get_sys_page(migrate->vma, addr);  in svm_migrate_copy_to_ram()
    616  if (!dpage) {  in svm_migrate_copy_to_ram()
    623  dst[i] = dma_map_page(dev, dpage, 0, PAGE_SIZE, DMA_FROM_DEVICE);  in svm_migrate_copy_to_ram()
    631  dst[i] >> PAGE_SHIFT, page_to_pfn(dpage));  in svm_migrate_copy_to_ram()
    633  migrate->dst[i] = migrate_pfn(page_to_pfn(dpage));  in svm_migrate_copy_to_ram()

/kernel/linux/linux-5.10/drivers/net/ethernet/3com/typhoon.c
   1336  void *dpage;  in typhoon_download_firmware()  [local]
   1357  dpage = dma_alloc_coherent(&pdev->dev, PAGE_SIZE, &dpage_dma, GFP_ATOMIC);  in typhoon_download_firmware()
   1358  if (!dpage) {  in typhoon_download_firmware()
   1422  dpage, len));  in typhoon_download_firmware()
   1462  dma_free_coherent(&pdev->dev, PAGE_SIZE, dpage, dpage_dma);  in typhoon_download_firmware()

/kernel/linux/linux-6.6/drivers/net/ethernet/3com/typhoon.c
   1333  void *dpage;  in typhoon_download_firmware()  [local]
   1354  dpage = dma_alloc_coherent(&pdev->dev, PAGE_SIZE, &dpage_dma, GFP_ATOMIC);  in typhoon_download_firmware()
   1355  if (!dpage) {  in typhoon_download_firmware()
   1419  dpage, len));  in typhoon_download_firmware()
   1459  dma_free_coherent(&pdev->dev, PAGE_SIZE, dpage, dpage_dma);  in typhoon_download_firmware()

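
typhoon_download_firmware() streams the firmware image through a single coherent DMA page: dma_alloc_coherent() returns both a CPU pointer (dpage) and a bus address (dpage_dma), the driver copies a chunk at a time into dpage and points the NIC at dpage_dma, and dma_free_coherent() releases the buffer at the end. A minimal sketch of that allocate/copy/free cycle; the function is invented, the hardware kick is a placeholder comment, and GFP_KERNEL is used here where the driver itself passes GFP_ATOMIC:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/minmax.h>
#include <linux/mm.h>
#include <linux/pci.h>
#include <linux/string.h>

/* Illustrative only: push an image to a device through one coherent page. */
static int download_via_coherent_page(struct pci_dev *pdev,
				      const u8 *image, size_t len)
{
	dma_addr_t dpage_dma;
	void *dpage;

	dpage = dma_alloc_coherent(&pdev->dev, PAGE_SIZE, &dpage_dma,
				   GFP_KERNEL);
	if (!dpage)
		return -ENOMEM;

	while (len) {
		size_t chunk = min_t(size_t, len, PAGE_SIZE);

		memcpy(dpage, image, chunk);
		/* ... tell the hardware to fetch `chunk` bytes at dpage_dma ... */
		image += chunk;
		len -= chunk;
	}

	dma_free_coherent(&pdev->dev, PAGE_SIZE, dpage, dpage_dma);
	return 0;
}
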
/kernel/linux/linux-5.10/drivers/crypto/hifn_795x.c
   1326  struct page *spage, *dpage;  in hifn_setup_dma()  [local]
   1347  dpage = sg_page(t);  in hifn_setup_dma()
   1352  dpage = sg_page(dst);  in hifn_setup_dma()
   1358  hifn_setup_dst_desc(dev, dpage, doff, len, n - len == 0);  in hifn_setup_dma()

/kernel/linux/linux-6.6/drivers/crypto/hifn_795x.c
   1326  struct page *spage, *dpage;  in hifn_setup_dma()  [local]
   1347  dpage = sg_page(t);  in hifn_setup_dma()
   1352  dpage = sg_page(dst);  in hifn_setup_dma()
   1358  hifn_setup_dst_desc(dev, dpage, doff, len, n - len == 0);  in hifn_setup_dma()

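
In hifn_setup_dma() the destination page for each DMA descriptor comes straight out of a scatterlist entry via sg_page(), together with the entry's offset and length. A tiny sketch of walking a scatterlist that way (the setup callback is a placeholder standing in for something like hifn_setup_dst_desc()):

#include <linux/scatterlist.h>

/* Illustrative only: hand each destination segment to a descriptor-setup hook. */
static void for_each_dst_segment(struct scatterlist *sgl, int nents,
				 void (*setup)(struct page *dpage,
					       unsigned int doff,
					       unsigned int len))
{
	struct scatterlist *sg;
	int i;

	for_each_sg(sgl, sg, nents, i)
		setup(sg_page(sg), sg->offset, sg->length);
}
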
/kernel/linux/linux-5.10/drivers/scsi/st.c
   4115  struct page *dpage = st_bp->reserved_pages[dst_seg];  in move_buffer_data()
   4119  memmove(page_address(dpage) + dst_offset,  in move_buffer_data()
   4100  struct page *dpage = st_bp->reserved_pages[dst_seg];  in move_buffer_data()  [local]

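
The st.c tape driver's move_buffer_data() shifts bytes between pages of its preallocated reserve buffer simply by mapping them with page_address() and calling memmove(). That only works because those reserved pages sit in the kernel direct map; a minimal sketch under that assumption (the helper name is invented):

#include <linux/mm.h>
#include <linux/string.h>

/* Illustrative only: move `count` bytes between two direct-mapped pages. */
static void move_between_pages(struct page *dpage, size_t doff,
			       struct page *spage, size_t soff, size_t count)
{
	memmove(page_address(dpage) + doff,
		page_address(spage) + soff, count);
}
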