/kernel/linux/linux-5.10/drivers/gpu/drm/nouveau/nvkm/subdev/fault/

  gp10b.c:49    gp10b_fault_new(struct nvkm_device *device, int index, struct nvkm_fault **pfault)   [pfault: argument]
  gp100.c:85    gp100_fault_new(struct nvkm_device *device, int index, struct nvkm_fault **pfault)   [pfault: argument]
  base.c:172    nvkm_fault_new_(const struct nvkm_fault_func *func, struct nvkm_device *device, int index, struct nvkm_fault **pfault)   [pfault: argument]
  tu102.c:165   tu102_fault_new(struct nvkm_device *device, int index, struct nvkm_fault **pfault)   [pfault: argument]
  gv100.c:231   gv100_fault_new(struct nvkm_device *device, int index, struct nvkm_fault **pfault)   [pfault: argument]

/kernel/linux/linux-6.6/drivers/gpu/drm/nouveau/nvkm/subdev/fault/

  gp10b.c:49    gp10b_fault_new(struct nvkm_device *device, enum nvkm_subdev_type type, int inst, struct nvkm_fault **pfault)   [pfault: argument]
  gp100.c:85    gp100_fault_new(struct nvkm_device *device, enum nvkm_subdev_type type, int inst, struct nvkm_fault **pfault)   [pfault: argument]
  base.c:155    nvkm_fault_new_(const struct nvkm_fault_func *func, struct nvkm_device *device, enum nvkm_subdev_type type, int inst, struct nvkm_fault **pfault)   [pfault: argument]
  tu102.c:175   tu102_fault_new(struct nvkm_device *device, enum nvkm_subdev_type type, int inst, struct nvkm_fault **pfault)   [pfault: argument]
  gv100.c:237   gv100_fault_new(struct nvkm_device *device, enum nvkm_subdev_type type, int inst, struct nvkm_fault **pfault)   [pfault: argument]

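Note: comparing the two trees above, the per-chip fault constructors changed from taking a flat subdev index in linux-5.10 to an (enum nvkm_subdev_type, instance) pair in linux-6.6, and nvkm_fault_new_() changed accordingly. A minimal sketch of the gv100 variant under each signature follows; only the signatures come from the listing, while the one-line body forwarding to nvkm_fault_new_() with a per-chip nvkm_fault_func descriptor (here called gv100_fault) is an assumption based on the common nvkm constructor pattern, not a verbatim copy of the listed files.

/* Sketch: the two constructor shapes implied by the listing. The bodies and
 * the gv100_fault descriptor name are assumed; the two definitions live in
 * different kernel trees and would not coexist in one file. */

/* linux-5.10: subdevices are identified by a flat index. */
int
gv100_fault_new(struct nvkm_device *device, int index,
		struct nvkm_fault **pfault)
{
	return nvkm_fault_new_(&gv100_fault, device, index, pfault);
}

/* linux-6.6: subdevices are identified by a (type, instance) pair. */
int
gv100_fault_new(struct nvkm_device *device, enum nvkm_subdev_type type, int inst,
		struct nvkm_fault **pfault)
{
	return nvkm_fault_new_(&gv100_fault, device, type, inst, pfault);
}
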
/kernel/linux/linux-5.10/drivers/infiniband/hw/mlx5/

  odp.c:444     mlx5_ib_page_fault_resume(struct mlx5_ib_dev *dev, struct mlx5_pagefault *pfault, int error)   [pfault: argument]
  odp.c:1055    pagefault_data_segments(struct mlx5_ib_dev *dev, struct mlx5_pagefault *pfault, void *wqe, void *wqe_end, u32 *bytes_mapped, u32 *total_wqe_bytes, bool receive_queue)   [pfault: argument]
  odp.c:1127    mlx5_ib_mr_initiator_pfault_handler(struct mlx5_ib_dev *dev, struct mlx5_pagefault *pfault, struct mlx5_ib_qp *qp, void **wqe, void **wqe_end, int wqe_length)   [pfault: argument]
  odp.c:1266    mlx5_ib_mr_wqe_pfault_handler(struct mlx5_ib_dev *dev, struct mlx5_pagefault *pfault)   [pfault: argument]
  odp.c:1362    mlx5_ib_mr_rdma_pfault_handler(struct mlx5_ib_dev *dev, struct mlx5_pagefault *pfault)   [pfault: argument]
  odp.c:1431    mlx5_ib_pfault(struct mlx5_ib_dev *dev, struct mlx5_pagefault *pfault)   [pfault: argument]
  odp.c:1451    struct mlx5_pagefault *pfault = container_of(work,   [pfault: local in mlx5_ib_eqe_pf_action()]
  odp.c:1463    struct mlx5_pagefault *pfault;   [pfault: local in mlx5_ib_eq_pf_process()]
  [all...]

/kernel/linux/linux-6.6/drivers/infiniband/hw/mlx5/

  odp.c:384     mlx5_ib_page_fault_resume(struct mlx5_ib_dev *dev, struct mlx5_pagefault *pfault, int error)   [pfault: argument]
  odp.c:981     pagefault_data_segments(struct mlx5_ib_dev *dev, struct mlx5_pagefault *pfault, void *wqe, void *wqe_end, u32 *bytes_mapped, u32 *total_wqe_bytes, bool receive_queue)   [pfault: argument]
  odp.c:1054    mlx5_ib_mr_initiator_pfault_handler(struct mlx5_ib_dev *dev, struct mlx5_pagefault *pfault, struct mlx5_ib_qp *qp, void **wqe, void **wqe_end, int wqe_length)   [pfault: argument]
  odp.c:1193    mlx5_ib_mr_wqe_pfault_handler(struct mlx5_ib_dev *dev, struct mlx5_pagefault *pfault)   [pfault: argument]
  odp.c:1289    mlx5_ib_mr_rdma_pfault_handler(struct mlx5_ib_dev *dev, struct mlx5_pagefault *pfault)   [pfault: argument]
  odp.c:1358    mlx5_ib_pfault(struct mlx5_ib_dev *dev, struct mlx5_pagefault *pfault)   [pfault: argument]
  odp.c:1378    struct mlx5_pagefault *pfault = container_of(work,   [pfault: local in mlx5_ib_eqe_pf_action()]
  odp.c:1390    struct mlx5_pagefault *pfault;   [pfault: local in mlx5_ib_eq_pf_process()]
  [all...]
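
Note: the odp.c matches cover the mlx5 on-demand-paging fault path: the event-queue code (mlx5_ib_eq_pf_process(), mlx5_ib_eqe_pf_action()) produces a struct mlx5_pagefault, mlx5_ib_pfault() dispatches it to the WQE or RDMA handler, and mlx5_ib_page_fault_resume() tells the device to resume the queue, with or without an error flag. A minimal dispatch sketch reconstructed from the handler names listed above follows; the MLX5_PFAULT_SUBTYPE_* constants and the error fallback are assumptions about how such a dispatcher is typically written, not text taken from the listing.

/* Sketch: dispatch of a page fault to the handlers listed above. Only the
 * function names and signatures come from the search results; the subtype
 * constants and the default error path are assumptions. */
static void mlx5_ib_pfault(struct mlx5_ib_dev *dev, struct mlx5_pagefault *pfault)
{
	switch (pfault->event_subtype) {
	case MLX5_PFAULT_SUBTYPE_WQE:
		/* Fault hit while the HCA was processing a work queue element. */
		mlx5_ib_mr_wqe_pfault_handler(dev, pfault);
		break;
	case MLX5_PFAULT_SUBTYPE_RDMA:
		/* Fault hit by an incoming RDMA request. */
		mlx5_ib_mr_rdma_pfault_handler(dev, pfault);
		break;
	default:
		/* Unknown subtype: resume the queue with an error so it does
		 * not stall waiting for a resolution that never arrives. */
		mlx5_ib_page_fault_resume(dev, pfault, 1);
		break;
	}
}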