Lines Matching defs:u_ctx

764 struct uld_ctx *u_ctx = ULD_CTX(ctx);
770 qid = u_ctx->lldi.rxq_ids[rxqidx];
771 fid = u_ctx->lldi.rxq_ids[0];
774 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[portno]);
802 struct uld_ctx *u_ctx = ULD_CTX(ctx);
819 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
1163 struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm));
1209 wrparam.qid = u_ctx->lldi.rxq_ids[reqctx->rxqidx];
1218 skb->dev = u_ctx->lldi.ports[0];
1369 struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm));
1381 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
1388 err = process_cipher(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx],
1392 skb->dev = u_ctx->lldi.ports[0];
1411 struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm));
1427 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
1431 err = process_cipher(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx],
1435 skb->dev = u_ctx->lldi.ports[0];
1442 struct uld_ctx *u_ctx = NULL;
1447 u_ctx = assign_chcr_device();
1448 if (!u_ctx) {
1453 ctx->dev = &u_ctx->dev;
1454 ntxq = u_ctx->lldi.ntxq;
1455 rxq_perchan = u_ctx->lldi.nrxq / u_ctx->lldi.nchan;
1456 txq_perchan = ntxq / u_ctx->lldi.nchan;
1458 ctx->nrxq = u_ctx->lldi.nrxq;
1568 struct uld_ctx *u_ctx = ULD_CTX(ctx);
1579 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
1630 dma_map_single(&u_ctx->lldi.pdev->dev, req_ctx->reqbfr,
1632 if (dma_mapping_error(&u_ctx->lldi.pdev->dev,
1660 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm));
1692 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
1700 error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req);
1737 skb->dev = u_ctx->lldi.ports[0];
1742 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
1765 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm));
1819 skb->dev = u_ctx->lldi.ports[0];
1833 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm));
1851 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
1858 error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req);
1912 skb->dev = u_ctx->lldi.ports[0];
1917 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
1928 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm));
1947 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
1955 error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req);
2006 skb->dev = u_ctx->lldi.ports[0];
2011 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
2023 struct uld_ctx *u_ctx = ULD_CTX(ctx);
2074 skb->dev = u_ctx->lldi.ports[0];
2090 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(tfm));
2103 dma_unmap_single(&u_ctx->lldi.pdev->dev, hctx_wr->dma_addr,
2130 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
2368 struct uld_ctx *u_ctx = ULD_CTX(a_ctx(tfm));
2370 chcr_aead_dma_unmap(&u_ctx->lldi.pdev->dev, req, reqctx->op);
2439 struct uld_ctx *u_ctx = ULD_CTX(ctx);
2459 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
2713 struct uld_ctx *u_ctx = ULD_CTX(ctx);
2717 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
2757 struct uld_ctx *u_ctx = ULD_CTX(ctx);
2761 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
2965 struct uld_ctx *u_ctx = ULD_CTX(ctx);
2975 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
3137 struct uld_ctx *u_ctx = ULD_CTX(ctx);
3154 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
3742 struct uld_ctx *u_ctx = ULD_CTX(ctx);
3759 if (cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
3774 skb = create_wr_fn(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx], size);
3781 skb->dev = u_ctx->lldi.ports[0];
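
Every match above follows the same pattern: a transform context is converted to the per-adapter uld_ctx with ULD_CTX(), and the embedded lldi (lower-layer driver info) then supplies the rx queue ids, the adapter ports, and the per-channel queue counts. The following is a minimal sketch of that pattern, written as if it lived inside the same driver file so the listed helpers are in scope; the chcr_submit_sketch() wrapper is hypothetical, reqctx->txqidx and the trailing process_cipher() arguments are assumptions (they are cut off in the matches above), and the final hand-off to the transmit path is left as a comment.

/* Sketch only: the recurring u_ctx access pattern from the matches above. */
static int chcr_submit_sketch(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req);
	struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm));	/* per-adapter ULD state */
	struct sk_buff *skb = NULL;
	int err;

	/* Back off when the crypto queue behind port 0 is already full
	 * (second argument assumed to be the request's tx queue index). */
	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
					    reqctx->txqidx)))
		return -ENOSPC;

	/* Build the work request against the rx queue picked for this request. */
	err = process_cipher(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx],
			     &skb, CHCR_ENCRYPT_OP);
	if (err || !skb)
		return err;

	/* Completions are steered through the adapter's first port. */
	skb->dev = u_ctx->lldi.ports[0];
	/* ... hand the skb to the LLD transmit path here ... */
	return -EINPROGRESS;
}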