Lines Matching refs:lldi

774 qid = u_ctx->lldi.rxq_ids[rxqidx];
775 fid = u_ctx->lldi.rxq_ids[0];
778 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[portno]);
826 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
1181 chcr_cipher_dma_unmap(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev,
1204 chcr_cipher_dma_unmap(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev,
1216 wrparam.qid = u_ctx->lldi.rxq_ids[reqctx->rxqidx];
1225 skb->dev = u_ctx->lldi.ports[0];
1237 chcr_cipher_dma_unmap(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev, req);
1281 err = chcr_cipher_dma_map(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev, req);
1335 chcr_cipher_dma_unmap(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev,
1364 chcr_cipher_dma_unmap(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev, req);
1388 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
1395 err = process_cipher(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx],
1399 skb->dev = u_ctx->lldi.ports[0];
1434 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
1438 err = process_cipher(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx],
1442 skb->dev = u_ctx->lldi.ports[0];
1461 ntxq = u_ctx->lldi.ntxq;
1462 rxq_perchan = u_ctx->lldi.nrxq / u_ctx->lldi.nchan;
1463 txq_perchan = ntxq / u_ctx->lldi.nchan;
1465 ctx->nrxq = u_ctx->lldi.nrxq;
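
Lines 1461-1465 above split the LLD's receive and transmit queues evenly across channels. A minimal sketch of that arithmetic follows; the layout struct and function are assumptions for illustration, and only the division by nchan mirrors the listing:

	#include <linux/types.h>

	/* Assumed per-adapter layout for the sketch, not the driver's real context. */
	struct chcr_queue_layout_sketch {
		unsigned int ntxq, nrxq, nchan;
		unsigned int txq_perchan, rxq_perchan;
	};

	static void chcr_split_queues_sketch(struct chcr_queue_layout_sketch *q,
					     unsigned int ntxq, unsigned int nrxq,
					     unsigned int nchan)
	{
		q->ntxq = ntxq;			/* u_ctx->lldi.ntxq */
		q->nrxq = nrxq;			/* u_ctx->lldi.nrxq */
		q->nchan = nchan;		/* u_ctx->lldi.nchan */
		q->rxq_perchan = nrxq / nchan;	/* rx response queues per channel */
		q->txq_perchan = ntxq / nchan;	/* tx queues per channel */
	}

The per-channel counts are what let the request paths map a queue index back to a port and channel, as the cxgb4_port_e2cchan() lookups at 778, 826 and later matches show.
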
1585 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
1636 dma_map_single(&u_ctx->lldi.pdev->dev, req_ctx->reqbfr,
1638 if (dma_mapping_error(&u_ctx->lldi.pdev->dev,
1695 /* Detach state for CHCR means lldi or padap is freed. Increasing
1696 * inflight count for dev guarantees that lldi and padap is valid
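
The comment at 1695-1696 describes the guard that the request paths in this listing depend on: a per-device inflight count is raised before lldi is dereferenced, so a detach cannot free lldi or padap underneath an outstanding request. A minimal sketch of that guard follows; the structure and helper names are assumptions, not the driver's exact definitions:

	#include <linux/atomic.h>
	#include <linux/errno.h>
	#include <linux/types.h>

	/* Assumed device state for the sketch only. */
	struct chcr_dev_sketch {
		atomic_t inflight;
		bool detached;
	};

	static inline int chcr_inc_wrcount_sketch(struct chcr_dev_sketch *dev)
	{
		/* Refuse new work once detach has started: lldi/padap may be freed. */
		if (dev->detached)
			return -ENODEV;
		atomic_inc(&dev->inflight);
		return 0;
	}

	static inline void chcr_dec_wrcount_sketch(struct chcr_dev_sketch *dev)
	{
		/* Dropped on completion; detach waits for the count to reach zero. */
		atomic_dec(&dev->inflight);
	}

With a guard of this shape taken before the queue-full check, the later u_ctx->lldi dereferences in these paths stay valid until the matching decrement.
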
1698 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
1706 error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req);
1743 skb->dev = u_ctx->lldi.ports[0];
1748 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
1825 skb->dev = u_ctx->lldi.ports[0];
1857 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
1864 error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req);
1918 skb->dev = u_ctx->lldi.ports[0];
1923 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
1953 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
1961 error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req);
2012 skb->dev = u_ctx->lldi.ports[0];
2017 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
2080 skb->dev = u_ctx->lldi.ports[0];
2109 dma_unmap_single(&u_ctx->lldi.pdev->dev, hctx_wr->dma_addr,
2136 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
2375 chcr_aead_dma_unmap(&u_ctx->lldi.pdev->dev, req, reqctx->op);
2396 error = chcr_aead_dma_map(&ULD_CTX(a_ctx(tfm))->lldi.pdev->dev, req,
2464 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
2722 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
2766 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
2980 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
3159 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
3757 /* Detach state for CHCR means lldi or padap is freed.
3763 if (cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
3778 skb = create_wr_fn(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx], size);
3785 skb->dev = u_ctx->lldi.ports[0];
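
Taken together, the matches above repeat one submission flow: check the crypto queue for backpressure, build the work-request skb for an lldi rx queue id, point skb->dev at an lldi port, and hand the skb to the LLD. A rough sketch of that flow follows; the request pointer, the qidx argument, the create_wr() callback and chcr_send_wr_sketch() are assumed names standing in for the driver's real helpers:

	#include <linux/skbuff.h>
	#include <linux/err.h>
	#include <linux/errno.h>

	int chcr_send_wr_sketch(struct sk_buff *skb);	/* assumed wrapper around the cxgb4 send path */

	static int chcr_submit_sketch(struct uld_ctx *u_ctx, void *req,
				      unsigned int rxqidx, unsigned int qidx,
				      unsigned int size,
				      struct sk_buff *(*create_wr)(void *, unsigned short,
								   unsigned int))
	{
		struct sk_buff *skb;

		/* Back off (or fall back to software) while the crypto queue is full. */
		if (cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], qidx))
			return -ENOSPC;

		/* Build the firmware work request for the chosen response queue id. */
		skb = create_wr(req, u_ctx->lldi.rxq_ids[rxqidx], size);
		if (IS_ERR(skb))
			return PTR_ERR(skb);

		/* Completions come back through the LLD's port 0 net_device. */
		skb->dev = u_ctx->lldi.ports[0];
		chcr_send_wr_sketch(skb);
		return -EINPROGRESS;
	}

Returning -EINPROGRESS follows the asynchronous crypto API convention: the actual result is delivered later through the response queue selected via lldi.rxq_ids[].
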