Lines Matching refs:msg
39 static int fill_sq(struct sk_buff *msg, struct t4_wq *wq)
42 if (rdma_nl_put_driver_u32(msg, "sqid", wq->sq.qid))
44 if (rdma_nl_put_driver_u32(msg, "flushed", wq->flushed))
46 if (rdma_nl_put_driver_u32(msg, "memsize", wq->sq.memsize))
48 if (rdma_nl_put_driver_u32(msg, "cidx", wq->sq.cidx))
50 if (rdma_nl_put_driver_u32(msg, "pidx", wq->sq.pidx))
52 if (rdma_nl_put_driver_u32(msg, "wq_pidx", wq->sq.wq_pidx))
54 if (rdma_nl_put_driver_u32(msg, "flush_cidx", wq->sq.flush_cidx))
56 if (rdma_nl_put_driver_u32(msg, "in_use", wq->sq.in_use))
58 if (rdma_nl_put_driver_u32(msg, "size", wq->sq.size))
60 if (rdma_nl_put_driver_u32_hex(msg, "flags", wq->sq.flags))
67 static int fill_rq(struct sk_buff *msg, struct t4_wq *wq)
70 if (rdma_nl_put_driver_u32(msg, "rqid", wq->rq.qid))
72 if (rdma_nl_put_driver_u32(msg, "memsize", wq->rq.memsize))
74 if (rdma_nl_put_driver_u32(msg, "cidx", wq->rq.cidx))
76 if (rdma_nl_put_driver_u32(msg, "pidx", wq->rq.pidx))
78 if (rdma_nl_put_driver_u32(msg, "wq_pidx", wq->rq.wq_pidx))
80 if (rdma_nl_put_driver_u32(msg, "msn", wq->rq.msn))
82 if (rdma_nl_put_driver_u32_hex(msg, "rqt_hwaddr", wq->rq.rqt_hwaddr))
84 if (rdma_nl_put_driver_u32(msg, "rqt_size", wq->rq.rqt_size))
86 if (rdma_nl_put_driver_u32(msg, "in_use", wq->rq.in_use))
88 if (rdma_nl_put_driver_u32(msg, "size", wq->rq.size))
95 static int fill_swsqe(struct sk_buff *msg, struct t4_sq *sq, u16 idx,
98 if (rdma_nl_put_driver_u32(msg, "idx", idx))
100 if (rdma_nl_put_driver_u32(msg, "opcode", sqe->opcode))
102 if (rdma_nl_put_driver_u32(msg, "complete", sqe->complete))
105 rdma_nl_put_driver_u32(msg, "cqe_status", CQE_STATUS(&sqe->cqe)))
107 if (rdma_nl_put_driver_u32(msg, "signaled", sqe->signaled))
109 if (rdma_nl_put_driver_u32(msg, "flushed", sqe->flushed))
119 static int fill_swsqes(struct sk_buff *msg, struct t4_sq *sq,
125 if (fill_swsqe(msg, sq, first_idx, first_sqe))
129 if (fill_swsqe(msg, sq, last_idx, last_sqe))
137 int c4iw_fill_res_qp_entry(struct sk_buff *msg, struct ib_qp *ibqp)
150 table_attr = nla_nest_start_noflag(msg, RDMA_NLDEV_ATTR_DRIVER);
173 if (fill_sq(msg, &wq))
176 if (fill_swsqes(msg, &wq.sq, first_sq_idx, fsp, last_sq_idx, lsp))
179 if (fill_rq(msg, &wq))
182 nla_nest_end(msg, table_attr);
186 nla_nest_cancel(msg, table_attr);
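
c4iw_fill_res_qp_entry() shows the usual restrack nesting pattern: open an RDMA_NLDEV_ATTR_DRIVER nest, emit the driver attributes, commit the nest with nla_nest_end() on success, and roll it back with nla_nest_cancel() on failure. A trimmed sketch (the snapshotting of the queues and the exact error-label names are assumptions, not shown by the matches):

	struct nlattr *table_attr;

	table_attr = nla_nest_start_noflag(msg, RDMA_NLDEV_ATTR_DRIVER);
	if (!table_attr)
		goto err;

	/* ... snapshot wq and the first/last software SQEs ... */

	if (fill_sq(msg, &wq))
		goto err_cancel_table;
	if (fill_swsqes(msg, &wq.sq, first_sq_idx, fsp, last_sq_idx, lsp))
		goto err_cancel_table;
	if (fill_rq(msg, &wq))
		goto err_cancel_table;

	nla_nest_end(msg, table_attr);
	return 0;

err_cancel_table:
	nla_nest_cancel(msg, table_attr);	/* drop the partially filled nest */
err:
	return -EMSGSIZE;
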
196 int c4iw_fill_res_cm_id_entry(struct sk_buff *msg,
216 table_attr = nla_nest_start_noflag(msg, RDMA_NLDEV_ATTR_DRIVER);
234 if (rdma_nl_put_driver_u32(msg, "state", epcp->state))
236 if (rdma_nl_put_driver_u64_hex(msg, "flags", epcp->flags))
238 if (rdma_nl_put_driver_u64_hex(msg, "history", epcp->history))
242 if (rdma_nl_put_driver_u32(msg, "stid", listen_ep->stid))
244 if (rdma_nl_put_driver_u32(msg, "backlog", listen_ep->backlog))
247 if (rdma_nl_put_driver_u32(msg, "hwtid", ep->hwtid))
249 if (rdma_nl_put_driver_u32(msg, "ord", ep->ord))
251 if (rdma_nl_put_driver_u32(msg, "ird", ep->ird))
253 if (rdma_nl_put_driver_u32(msg, "emss", ep->emss))
256 if (!ep->parent_ep && rdma_nl_put_driver_u32(msg, "atid",
260 nla_nest_end(msg, table_attr);
265 nla_nest_cancel(msg, table_attr);
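
c4iw_fill_res_cm_id_entry() uses the same nest/cancel pattern and then branches on the endpoint type: a listening endpoint reports stid and backlog, a connected endpoint reports hwtid, ord, ird and emss, and atid is emitted only when the endpoint has no parent (an active-open connection). A sketch of that branch, assuming the distinction is made on the endpoint state and that the wrapped argument of the atid put is ep->atid:

	if (epcp->state == LISTEN) {
		if (rdma_nl_put_driver_u32(msg, "stid", listen_ep->stid))
			goto err_cancel_table;
		if (rdma_nl_put_driver_u32(msg, "backlog", listen_ep->backlog))
			goto err_cancel_table;
	} else {
		if (rdma_nl_put_driver_u32(msg, "hwtid", ep->hwtid))
			goto err_cancel_table;
		/* ... ord, ird, emss ... */
		/* atid only exists for endpoints without a parent listener */
		if (!ep->parent_ep && rdma_nl_put_driver_u32(msg, "atid",
							     ep->atid))
			goto err_cancel_table;
	}
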
271 static int fill_cq(struct sk_buff *msg, struct t4_cq *cq)
273 if (rdma_nl_put_driver_u32(msg, "cqid", cq->cqid))
275 if (rdma_nl_put_driver_u32(msg, "memsize", cq->memsize))
277 if (rdma_nl_put_driver_u32(msg, "size", cq->size))
279 if (rdma_nl_put_driver_u32(msg, "cidx", cq->cidx))
281 if (rdma_nl_put_driver_u32(msg, "cidx_inc", cq->cidx_inc))
283 if (rdma_nl_put_driver_u32(msg, "sw_cidx", cq->sw_cidx))
285 if (rdma_nl_put_driver_u32(msg, "sw_pidx", cq->sw_pidx))
287 if (rdma_nl_put_driver_u32(msg, "sw_in_use", cq->sw_in_use))
289 if (rdma_nl_put_driver_u32(msg, "vector", cq->vector))
291 if (rdma_nl_put_driver_u32(msg, "gen", cq->gen))
293 if (rdma_nl_put_driver_u32(msg, "error", cq->error))
295 if (rdma_nl_put_driver_u64_hex(msg, "bits_type_ts",
298 if (rdma_nl_put_driver_u64_hex(msg, "flags", cq->flags))
307 static int fill_cqe(struct sk_buff *msg, struct t4_cqe *cqe, u16 idx,
310 if (rdma_nl_put_driver_u32(msg, qstr, idx))
312 if (rdma_nl_put_driver_u32_hex(msg, "header",
315 if (rdma_nl_put_driver_u32(msg, "len", be32_to_cpu(cqe->len)))
317 if (rdma_nl_put_driver_u32_hex(msg, "wrid_hi",
320 if (rdma_nl_put_driver_u32_hex(msg, "wrid_low",
323 if (rdma_nl_put_driver_u64_hex(msg, "bits_type_ts",
333 static int fill_hwcqes(struct sk_buff *msg, struct t4_cq *cq,
339 if (fill_cqe(msg, cqes, idx, "hwcq_idx"))
342 if (fill_cqe(msg, cqes + 1, idx, "hwcq_idx"))
350 static int fill_swcqes(struct sk_buff *msg, struct t4_cq *cq,
359 if (fill_cqe(msg, cqes, idx, "swcq_idx"))
364 if (fill_cqe(msg, cqes + 1, idx, "swcq_idx"))
372 int c4iw_fill_res_cq_entry(struct sk_buff *msg, struct ib_cq *ibcq)
385 table_attr = nla_nest_start_noflag(msg, RDMA_NLDEV_ATTR_DRIVER);
413 if (fill_cq(msg, &cq))
416 if (fill_swcqes(msg, &cq, swcqes))
419 if (fill_hwcqes(msg, &cq, hwcqes))
422 nla_nest_end(msg, table_attr);
426 nla_nest_cancel(msg, table_attr);
431 int c4iw_fill_res_mr_entry(struct sk_buff *msg, struct ib_mr *ibmr)
443 table_attr = nla_nest_start_noflag(msg, RDMA_NLDEV_ATTR_DRIVER);
454 if (rdma_nl_put_driver_u32_hex(msg, "idx", stag >> 8))
456 if (rdma_nl_put_driver_u32(msg, "valid",
459 if (rdma_nl_put_driver_u32_hex(msg, "key", stag & 0xff))
461 if (rdma_nl_put_driver_u32(msg, "state",
464 if (rdma_nl_put_driver_u32(msg, "pdid",
467 if (rdma_nl_put_driver_u32_hex(msg, "perm",
470 if (rdma_nl_put_driver_u32(msg, "ps",
473 if (rdma_nl_put_driver_u64(msg, "len",
476 if (rdma_nl_put_driver_u32_hex(msg, "pbl_addr",
480 nla_nest_end(msg, table_attr);
484 nla_nest_cancel(msg, table_attr);
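
c4iw_fill_res_mr_entry() reports both halves of the MR's steering tag before decoding the fetched TPT entry: the upper 24 bits are the TPT index ("idx") and the low 8 bits are the key. A minimal illustration of that split, assuming stag holds the MR's steering tag and the same cancel-on-error label as above:

	/* stag layout: TPT index in bits 31:8, key in bits 7:0 */
	if (rdma_nl_put_driver_u32_hex(msg, "idx", stag >> 8))
		goto err_cancel_table;
	if (rdma_nl_put_driver_u32_hex(msg, "key", stag & 0xff))
		goto err_cancel_table;
	/* the remaining attributes (valid, state, pdid, perm, ps, len,
	 * pbl_addr) are decoded from the fetched TPT entry
	 */
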