
Searched refs:mthca_cq (Results 1 - 12 of 12) sorted by relevance

/kernel/linux/linux-5.10/drivers/infiniband/hw/mthca/
mthca_cq.c
169 static inline struct mthca_cqe *get_cqe(struct mthca_cq *cq, int entry) in get_cqe()
179 static inline struct mthca_cqe *next_cqe_sw(struct mthca_cq *cq) in next_cqe_sw()
204 static inline void update_cons_index(struct mthca_dev *dev, struct mthca_cq *cq, in update_cons_index()
219 struct mthca_cq *cq; in mthca_cq_completion()
236 struct mthca_cq *cq; in mthca_cq_event()
273 void mthca_cq_clean(struct mthca_dev *dev, struct mthca_cq *cq, u32 qpn, in mthca_cq_clean()
325 void mthca_cq_resize_copy_cqes(struct mthca_cq *cq) in mthca_cq_resize_copy_cqes()
372 static void handle_error_cqe(struct mthca_dev *dev, struct mthca_cq *cq, in handle_error_cqe()
479 struct mthca_cq *cq, in mthca_poll_one()
658 struct mthca_cq *c in mthca_poll_cq()
[all...]
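The mthca_cq.c hits above (next_cqe_sw(), update_cons_index(), mthca_poll_one(), mthca_poll_cq()) are the usual completion-queue polling machinery: find the next CQE the hardware has handed back to software, consume it, and then publish the new consumer index. The following is a minimal user-space sketch of that general pattern only; the structure layouts, flag values, and helper bodies are illustrative assumptions, not the mthca definitions.

/*
 * Minimal model of the polling pattern suggested by next_cqe_sw() and
 * update_cons_index() above: scan the ring for entries hardware has
 * handed back to software, consume them, then publish the new
 * consumer index.  All fields and values here are illustrative.
 */
#include <stdio.h>
#include <stdint.h>

#define CQ_SIZE      8            /* ring size, power of two */
#define CQE_OWNER_HW 0x80         /* set while "hardware" still owns the entry */

struct demo_cqe {
	uint8_t  owner;           /* ownership flag, written by hardware */
	uint32_t wr_id;           /* which work request completed */
};

struct demo_cq {
	struct demo_cqe buf[CQ_SIZE];
	uint32_t cons_index;      /* next entry software will look at */
};

/* Return the next software-owned CQE, or NULL if the ring is empty. */
static struct demo_cqe *next_cqe_sw(struct demo_cq *cq)
{
	struct demo_cqe *cqe = &cq->buf[cq->cons_index & (CQ_SIZE - 1)];

	return (cqe->owner & CQE_OWNER_HW) ? NULL : cqe;
}

/* Stand-in for the doorbell write that tells hardware how far we got. */
static void update_cons_index(struct demo_cq *cq)
{
	printf("consumer index now %u\n", cq->cons_index);
}

int main(void)
{
	struct demo_cq cq = { .cons_index = 0 };
	struct demo_cqe *cqe;
	int i;

	/* Pretend hardware completed the first three work requests. */
	for (i = 0; i < CQ_SIZE; i++)
		cq.buf[i].owner = CQE_OWNER_HW;
	for (i = 0; i < 3; i++) {
		cq.buf[i].owner = 0;
		cq.buf[i].wr_id = 100 + i;
	}

	/* Poll loop: consume everything software owns, then ring once. */
	while ((cqe = next_cqe_sw(&cq)) != NULL) {
		printf("completion for wr_id %u\n", cqe->wr_id);
		cqe->owner = CQE_OWNER_HW;   /* hand the slot back */
		cq.cons_index++;
	}
	update_cons_index(&cq);

	return 0;
}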
mthca_provider.h
121 * struct mthca_cq/qp also has its own lock. An individual qp lock
126 * Each struct mthca_cq/qp also has an ref count, protected by the
133 * Finally, each struct mthca_cq/qp has a wait_queue_head_t for the
183 struct mthca_cq { struct
301 static inline struct mthca_cq *to_mcq(struct ib_cq *ibcq) in to_mcq()
303 return container_of(ibcq, struct mthca_cq, ibcq); in to_mcq()
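to_mcq() at line 301 above is the standard container_of() idiom: the core struct ib_cq is embedded inside the driver's struct mthca_cq, so a pointer to the embedded member can be converted back into a pointer to the containing driver object. The stand-alone sketch below shows only that idiom, with simplified stand-in struct layouts and a local container_of macro rather than the real kernel definitions.

/*
 * Illustration of the container_of() idiom used by to_mcq() above.
 * The struct layouts are simplified stand-ins, not the kernel ones.
 */
#include <stdio.h>
#include <stddef.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct ib_cq {                    /* simplified core object */
	int cqe;
};

struct mthca_cq {                 /* simplified driver object */
	struct ib_cq ibcq;        /* embedded core object */
	int cqn;                  /* driver-private state */
};

static struct mthca_cq *to_mcq(struct ib_cq *ibcq)
{
	return container_of(ibcq, struct mthca_cq, ibcq);
}

int main(void)
{
	struct mthca_cq cq = { .ibcq = { .cqe = 256 }, .cqn = 7 };
	struct ib_cq *ibcq = &cq.ibcq;   /* what the core layer hands back */

	printf("cqn = %d, cqe = %d\n",
	       to_mcq(ibcq)->cqn, to_mcq(ibcq)->ibcq.cqe);
	printf("recovered same object: %s\n",
	       to_mcq(ibcq) == &cq ? "yes" : "no");
	return 0;
}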
mthca_dev.h
489 struct mthca_cq *cq);
491 struct mthca_cq *cq);
495 void mthca_cq_clean(struct mthca_dev *dev, struct mthca_cq *cq, u32 qpn,
497 void mthca_cq_resize_copy_cqes(struct mthca_cq *cq);
535 struct mthca_cq *send_cq,
536 struct mthca_cq *recv_cq,
544 struct mthca_cq *send_cq,
545 struct mthca_cq *recv_cq,
Makefile
5 mthca_allocator.o mthca_eq.o mthca_pd.o mthca_cq.o \
mthca_qp.c
1161 struct mthca_cq *send_cq, in mthca_alloc_qp_common()
1162 struct mthca_cq *recv_cq, in mthca_alloc_qp_common()
1290 struct mthca_cq *send_cq, in mthca_alloc_qp()
1291 struct mthca_cq *recv_cq, in mthca_alloc_qp()
1333 static void mthca_lock_cqs(struct mthca_cq *send_cq, struct mthca_cq *recv_cq)
1348 static void mthca_unlock_cqs(struct mthca_cq *send_cq, struct mthca_cq *recv_cq)
1365 struct mthca_cq *send_cq, in mthca_alloc_sqp()
1366 struct mthca_cq *recv_c in mthca_alloc_sqp()
[all...]
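mthca_lock_cqs()/mthca_unlock_cqs() in the mthca_qp.c hits above take both the send and receive CQ locks for a QP. The generic way to do that without deadlocking is to acquire the two locks in a fixed order and to lock only once when both pointers refer to the same CQ. The sketch below models that pattern with pthread mutexes and an illustrative cqn ordering field; it is a generic sketch under those assumptions, not the mthca implementation.

/*
 * Generic model of the two-lock pattern implied by mthca_lock_cqs() /
 * mthca_unlock_cqs(): fixed acquisition order (lower CQ number first),
 * single acquisition when send and receive CQ are the same object.
 */
#include <pthread.h>
#include <stdio.h>

struct demo_cq {
	pthread_mutex_t lock;
	int cqn;                 /* used only to define a fixed order */
};

static void lock_cqs(struct demo_cq *send_cq, struct demo_cq *recv_cq)
{
	if (send_cq == recv_cq) {
		pthread_mutex_lock(&send_cq->lock);
	} else if (send_cq->cqn < recv_cq->cqn) {
		pthread_mutex_lock(&send_cq->lock);
		pthread_mutex_lock(&recv_cq->lock);
	} else {
		pthread_mutex_lock(&recv_cq->lock);
		pthread_mutex_lock(&send_cq->lock);
	}
}

static void unlock_cqs(struct demo_cq *send_cq, struct demo_cq *recv_cq)
{
	if (send_cq == recv_cq) {
		pthread_mutex_unlock(&send_cq->lock);
	} else {
		/* release order does not affect deadlock safety */
		pthread_mutex_unlock(&send_cq->lock);
		pthread_mutex_unlock(&recv_cq->lock);
	}
}

int main(void)
{
	struct demo_cq a = { PTHREAD_MUTEX_INITIALIZER, 1 };
	struct demo_cq b = { PTHREAD_MUTEX_INITIALIZER, 2 };

	lock_cqs(&b, &a);        /* always acquires the cqn-1 lock first */
	printf("both CQ locks held\n");
	unlock_cqs(&b, &a);
	return 0;
}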
mthca_provider.c
608 struct mthca_cq *cq; in mthca_create_cq()
677 static int mthca_alloc_resize_buf(struct mthca_dev *dev, struct mthca_cq *cq, in mthca_alloc_resize_buf()
725 struct mthca_cq *cq = to_mcq(ibcq); in mthca_resize_cq()
1116 INIT_RDMA_OBJ_SIZE(ib_cq, mthca_cq, ibcq),
/kernel/linux/linux-6.6/drivers/infiniband/hw/mthca/
mthca_cq.c
169 static inline struct mthca_cqe *get_cqe(struct mthca_cq *cq, int entry) in get_cqe()
179 static inline struct mthca_cqe *next_cqe_sw(struct mthca_cq *cq) in next_cqe_sw()
204 static inline void update_cons_index(struct mthca_dev *dev, struct mthca_cq *cq, in update_cons_index()
219 struct mthca_cq *cq; in mthca_cq_completion()
236 struct mthca_cq *cq; in mthca_cq_event()
273 void mthca_cq_clean(struct mthca_dev *dev, struct mthca_cq *cq, u32 qpn, in mthca_cq_clean()
325 void mthca_cq_resize_copy_cqes(struct mthca_cq *cq) in mthca_cq_resize_copy_cqes()
372 static void handle_error_cqe(struct mthca_dev *dev, struct mthca_cq *cq, in handle_error_cqe()
479 struct mthca_cq *cq, in mthca_poll_one()
658 struct mthca_cq *c in mthca_poll_cq()
[all...]
mthca_provider.h
121 * struct mthca_cq/qp also has its own lock. An individual qp lock
126 * Each struct mthca_cq/qp also has an ref count, protected by the
133 * Finally, each struct mthca_cq/qp has a wait_queue_head_t for the
183 struct mthca_cq { struct
301 static inline struct mthca_cq *to_mcq(struct ib_cq *ibcq) in to_mcq()
303 return container_of(ibcq, struct mthca_cq, ibcq); in to_mcq()
mthca_dev.h
489 struct mthca_cq *cq);
491 struct mthca_cq *cq);
495 void mthca_cq_clean(struct mthca_dev *dev, struct mthca_cq *cq, u32 qpn,
497 void mthca_cq_resize_copy_cqes(struct mthca_cq *cq);
535 struct mthca_cq *send_cq,
536 struct mthca_cq *recv_cq,
544 struct mthca_cq *send_cq,
545 struct mthca_cq *recv_cq,
Makefile
5 mthca_allocator.o mthca_eq.o mthca_pd.o mthca_cq.o \
mthca_qp.c
1164 struct mthca_cq *send_cq, in mthca_alloc_qp_common()
1165 struct mthca_cq *recv_cq, in mthca_alloc_qp_common()
1293 struct mthca_cq *send_cq, in mthca_alloc_qp()
1294 struct mthca_cq *recv_cq, in mthca_alloc_qp()
1336 static void mthca_lock_cqs(struct mthca_cq *send_cq, struct mthca_cq *recv_cq)
1351 static void mthca_unlock_cqs(struct mthca_cq *send_cq, struct mthca_cq *recv_cq)
1368 struct mthca_cq *send_cq, in mthca_alloc_sqp()
1369 struct mthca_cq *recv_c in mthca_alloc_sqp()
[all...]
mthca_provider.c
582 struct mthca_cq *cq; in mthca_create_cq()
651 static int mthca_alloc_resize_buf(struct mthca_dev *dev, struct mthca_cq *cq, in mthca_alloc_resize_buf()
699 struct mthca_cq *cq = to_mcq(ibcq); in mthca_resize_cq()
1096 INIT_RDMA_OBJ_SIZE(ib_cq, mthca_cq, ibcq),

Completed in 15 milliseconds