Lines matching u_ctx (local definitions and uses)

768 struct uld_ctx *u_ctx = ULD_CTX(ctx);
774 qid = u_ctx->lldi.rxq_ids[rxqidx];
775 fid = u_ctx->lldi.rxq_ids[0];
778 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[portno]);
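The three lines above (774, 775, 778) show the queue/channel selection pattern that repeats throughout the driver: the reply-queue id comes from lldi.rxq_ids[] indexed by the request's rx-queue index, a second id is taken from the first rxq, and the hardware channel is looked up from the owning port with cxgb4_port_e2cchan(). Below is a minimal sketch of that derivation, assuming the chcr/cxgb4 declarations the file already includes (struct chcr_context, struct uld_ctx, ULD_CTX()); the rxqidx parameter and the per-channel division stand in for per-request bookkeeping that this listing does not show.

/* Sketch only: queue/channel selection as seen at lines 774-778.
 * rxqidx normally comes from the request context; here it is a parameter.
 */
static void chcr_pick_queues_sketch(struct chcr_context *ctx, unsigned int rxqidx,
				    unsigned int *qid, unsigned int *fid,
				    unsigned int *rx_channel_id)
{
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	unsigned int rxq_perchan = u_ctx->lldi.nrxq / u_ctx->lldi.nchan;
	unsigned int portno = rxqidx / rxq_perchan;	/* which port owns this rxq */

	*qid = u_ctx->lldi.rxq_ids[rxqidx];	/* reply queue for this request */
	*fid = u_ctx->lldi.rxq_ids[0];		/* first rx queue id, also carried in the WR */
	*rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[portno]);
}

Keeping the per-request rxq index stable means replies for a given request always land on the same reply queue, which is why the same rxq_ids[reqctx->rxqidx] expression reappears at lines 1216, 1395, 1438 and 3778.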
809 struct uld_ctx *u_ctx = ULD_CTX(ctx);
826 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
1170 struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm));
1216 wrparam.qid = u_ctx->lldi.rxq_ids[reqctx->rxqidx];
1225 skb->dev = u_ctx->lldi.ports[0];
1376 struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm));
1388 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
1395 err = process_cipher(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx],
1399 skb->dev = u_ctx->lldi.ports[0];
1418 struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm));
1434 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
1438 err = process_cipher(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx],
1442 skb->dev = u_ctx->lldi.ports[0];
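Lines 1376-1399 and 1418-1442 are the encrypt and decrypt entry points; both follow the same submission pattern around u_ctx. The sketch below reconstructs that pattern under the assumption that the request-context field names (rxqidx, txqidx) and the trailing arguments of process_cipher() are as shown here; the tx-queue selection and the actual send do not touch u_ctx and are left out.

/* Sketch of the submission pattern visible at lines 1388-1399 and 1434-1442. */
static int chcr_cipher_submit_sketch(struct skcipher_request *req, int op_type)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req);
	struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm));
	struct sk_buff *skb = NULL;
	int err;

	/* Back off when the crypto queue on port 0 is full and the caller
	 * did not allow backlogging.
	 */
	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
					    reqctx->txqidx) &&
		     !(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)))
		return -ENOSPC;

	/* Build the cipher WR against the reply queue chosen for this request. */
	err = process_cipher(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx],
			     &skb, op_type);
	if (err || !skb)
		return err;

	skb->dev = u_ctx->lldi.ports[0];	/* transmit through the first port */
	/* ...tx-queue selection and the actual send are elided here. */
	return -EINPROGRESS;
}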
1449 struct uld_ctx *u_ctx = NULL;
1454 u_ctx = assign_chcr_device();
1455 if (!u_ctx) {
1460 ctx->dev = &u_ctx->dev;
1461 ntxq = u_ctx->lldi.ntxq;
1462 rxq_perchan = u_ctx->lldi.nrxq / u_ctx->lldi.nchan;
1463 txq_perchan = ntxq / u_ctx->lldi.nchan;
1465 ctx->nrxq = u_ctx->lldi.nrxq;
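Lines 1449-1465 are the one-time binding of a transform context to a chcr device. A sketch of that init path follows; the destination fields for the derived per-channel counts (only ctx->nrxq appears in the listing) and the error code are assumptions.

static int chcr_device_init_sketch(struct chcr_context *ctx)
{
	struct uld_ctx *u_ctx;
	unsigned int ntxq, rxq_perchan, txq_perchan;

	u_ctx = assign_chcr_device();
	if (!u_ctx)
		return -ENXIO;		/* no chcr-capable adapter available (assumed errno) */

	ctx->dev = &u_ctx->dev;

	/* Split the ULD's rx/tx queues evenly across the adapter's channels. */
	ntxq = u_ctx->lldi.ntxq;
	rxq_perchan = u_ctx->lldi.nrxq / u_ctx->lldi.nchan;
	txq_perchan = ntxq / u_ctx->lldi.nchan;

	ctx->nrxq = u_ctx->lldi.nrxq;
	ctx->rxq_perchan = rxq_perchan;	/* assumed destination fields */
	ctx->txq_perchan = txq_perchan;
	return 0;
}

The per-channel counts computed here are what the WR-building paths later divide by to map a request's queue index back to a port (as in the sketch after line 778).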
1574 struct uld_ctx *u_ctx = ULD_CTX(ctx);
1585 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
1636 dma_map_single(&u_ctx->lldi.pdev->dev, req_ctx->reqbfr,
1638 if (dma_mapping_error(&u_ctx->lldi.pdev->dev,
1666 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm));
1698 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
1706 error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req);
1743 skb->dev = u_ctx->lldi.ports[0];
1748 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
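Lines 1666-1748 (and the near-identical final/finup/digest paths below) show the hash submit flow: check the crypto queue on port 0, DMA-map the request against the adapter's PCI device, build and send the work request, and unmap again if anything fails. The sketch below follows that flow; build_hash_wr_sketch() is a hypothetical stand-in for the driver's WR builder, and the request-context field names are assumptions where the listing truncates the calls.

/* Hypothetical WR builder, standing in for the driver's own helper. */
static struct sk_buff *build_hash_wr_sketch(struct ahash_request *req);

static int chcr_ahash_submit_sketch(struct ahash_request *req)
{
	struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
	struct chcr_ahash_req_ctx *reqctx = ahash_request_ctx(req);
	struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm));
	struct sk_buff *skb;
	int error;

	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
					    reqctx->txqidx) &&
		     !(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)))
		return -ENOSPC;

	/* Map the request's scatterlist for DMA via the adapter's PCI device. */
	error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req);
	if (error)
		return error;

	skb = build_hash_wr_sketch(req);	/* hypothetical stand-in */
	if (IS_ERR(skb)) {
		error = PTR_ERR(skb);
		goto unmap;
	}

	skb->dev = u_ctx->lldi.ports[0];
	/* ...tx-queue selection and send elided. */
	return -EINPROGRESS;

unmap:
	chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
	return error;
}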
1771 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm));
1825 skb->dev = u_ctx->lldi.ports[0];
1839 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm));
1857 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
1864 error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req);
1918 skb->dev = u_ctx->lldi.ports[0];
1923 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
1934 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm));
1953 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
1961 error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req);
2012 skb->dev = u_ctx->lldi.ports[0];
2017 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
2029 struct uld_ctx *u_ctx = ULD_CTX(ctx);
2080 skb->dev = u_ctx->lldi.ports[0];
2096 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(tfm));
2109 dma_unmap_single(&u_ctx->lldi.pdev->dev, hctx_wr->dma_addr,
2136 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
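Lines 2096-2136 are the completion side of the hash path: when the response arrives, the partial-buffer mapping created with dma_map_single() at line 1636 is undone against the same &u_ctx->lldi.pdev->dev, and the scatterlist mapping from submit time is released. A sketch of that cleanup follows; field names beyond hctx_wr->dma_addr (dma_len, the DMA direction, the per-WR struct name) are assumptions, and the surrounding digest-copy logic is omitted.

static void chcr_hash_resp_cleanup_sketch(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct chcr_ahash_req_ctx *reqctx = ahash_request_ctx(req);
	struct chcr_hctx_per_wr *hctx_wr = &reqctx->hctx_wr;	/* assumed layout */
	struct uld_ctx *u_ctx = ULD_CTX(h_ctx(tfm));

	if (hctx_wr->dma_addr) {
		/* Undo the dma_map_single() taken when the WR was built. */
		dma_unmap_single(&u_ctx->lldi.pdev->dev, hctx_wr->dma_addr,
				 hctx_wr->dma_len, DMA_TO_DEVICE);
		hctx_wr->dma_addr = 0;
	}
	/* Release the scatterlist mapping taken at submit time. */
	chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
}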
2373 struct uld_ctx *u_ctx = ULD_CTX(a_ctx(tfm));
2375 chcr_aead_dma_unmap(&u_ctx->lldi.pdev->dev, req, reqctx->op);
2444 struct uld_ctx *u_ctx = ULD_CTX(ctx);
2464 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
2718 struct uld_ctx *u_ctx = ULD_CTX(ctx);
2722 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
2762 struct uld_ctx *u_ctx = ULD_CTX(ctx);
2766 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
2970 struct uld_ctx *u_ctx = ULD_CTX(ctx);
2980 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
3142 struct uld_ctx *u_ctx = ULD_CTX(ctx);
3159 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
3746 struct uld_ctx *u_ctx = ULD_CTX(ctx);
3763 if (cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
3778 skb = create_wr_fn(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx], size);
3785 skb->dev = u_ctx->lldi.ports[0];
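Lines 3746-3785 are the generic AEAD dispatcher: every AEAD mode funnels through one helper that takes a WR-builder callback (create_wr_fn), checks the crypto queue, builds the WR against the request's reply queue, and transmits it via port 0's net_device. A sketch of that dispatcher follows; the create_wr_t typedef and the reqctx field names are assumptions matching the calls shown.

/* Assumed callback type matching the create_wr_fn call at line 3778. */
typedef struct sk_buff *(*create_wr_t)(struct aead_request *req,
				       unsigned short qid, int size);

static int chcr_aead_op_sketch(struct aead_request *req, int size,
			       create_wr_t create_wr_fn)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	struct uld_ctx *u_ctx = ULD_CTX(a_ctx(tfm));
	struct sk_buff *skb;

	if (cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], reqctx->txqidx) &&
	    !(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
		return -ENOSPC;

	/* Build the work request against this request's reply queue. */
	skb = create_wr_fn(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx], size);
	if (IS_ERR_OR_NULL(skb))
		return PTR_ERR_OR_ZERO(skb);

	skb->dev = u_ctx->lldi.ports[0];	/* send via the first port's net_device */
	/* ...tx-queue selection and send elided. */
	return -EINPROGRESS;
}

Passing the WR builder as a callback is what lets the authenc, CCM and GCM builders at lines 2444, 2970 and 3142 share this single queue-check/submit path.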