Lines matching refs: cq_buf (retired CQ ring buffer handling in the irdma driver)
1807 struct irdma_cq_buf *cq_buf = container_of(work, struct irdma_cq_buf, work);
1809 dma_free_coherent(cq_buf->hw->device, cq_buf->kmem_buf.size,
1810 cq_buf->kmem_buf.va, cq_buf->kmem_buf.pa);
1811 cq_buf->kmem_buf.va = NULL;
1812 kfree(cq_buf);
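
The matches at 1807-1812 are the deferred-free work handler (named irdma_free_cqbuf at line 2004 below): it recovers the wrapper from its embedded work item, returns the coherent ring memory, and frees the wrapper. Below is a minimal sketch of that pattern, using a hypothetical stand-in struct (old_cq_buf) since only a few of irdma_cq_buf's fields are visible in the listing.

	/*
	 * Sketch of the deferred-free handler. All names are hypothetical;
	 * only the container_of / dma_free_coherent / kfree sequence comes
	 * from the listing.
	 */
	#include <linux/dma-mapping.h>
	#include <linux/list.h>
	#include <linux/slab.h>
	#include <linux/workqueue.h>

	struct old_cq_buf {			/* stand-in for struct irdma_cq_buf */
		struct list_head list;		/* linkage on the CQ's resize list */
		struct device *dev;		/* device owning the DMA mapping */
		void *va;			/* CPU address of the retired ring */
		dma_addr_t pa;			/* bus address of the retired ring */
		size_t size;			/* ring allocation size */
		struct work_struct work;	/* deferred-free work item */
	};

	static void old_cq_buf_free(struct work_struct *work)
	{
		/* Recover the wrapper from its embedded work item. */
		struct old_cq_buf *buf = container_of(work, struct old_cq_buf, work);

		/* Return the coherent ring memory, then the wrapper itself. */
		dma_free_coherent(buf->dev, buf->size, buf->va, buf->pa);
		kfree(buf);
	}

Pushing the dma_free_coherent() into a work item keeps it out of the context that decides the buffer is no longer needed, which in this listing is the poll/cleanup path and is likely running under the CQ lock where a coherent free is not allowed.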
1826 struct irdma_cq_buf *cq_buf;
1830 cq_buf = list_entry(list_node, struct irdma_cq_buf, list);
1831 if (cq_buf == lcqe_buf)
1834 list_del(&cq_buf->list);
1835 queue_work(iwdev->cleanup_wq, &cq_buf->work);
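
Lines 1826-1835 walk the list of retired buffers and hand each one to a cleanup workqueue, skipping the buffer that still holds the last polled CQE (lcqe_buf). A sketch under the same assumptions, reusing old_cq_buf from the sketch above (the listing uses list_entry() on a raw node; the entry-based iterator here is the equivalent idiom):

	/*
	 * Sketch of the cleanup pass. The resize_list, lcqe_buf and
	 * cleanup_wq parameters are assumptions about the caller.
	 */
	static void drain_retired_bufs(struct list_head *resize_list,
				       struct old_cq_buf *lcqe_buf,
				       struct workqueue_struct *cleanup_wq)
	{
		struct old_cq_buf *buf, *next;

		list_for_each_entry_safe(buf, next, resize_list, list) {
			/* The buffer holding the last polled CQE is still live. */
			if (buf == lcqe_buf)
				continue;

			/* Unlink, then let the workqueue do the actual freeing. */
			list_del(&buf->list);
			queue_work(cleanup_wq, &buf->work);
		}
	}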
1898 struct irdma_cq_buf *cq_buf = NULL;
1970 cq_buf = kzalloc(sizeof(*cq_buf), GFP_KERNEL);
1971 if (!cq_buf) {
2000 if (cq_buf) {
2001 cq_buf->kmem_buf = iwcq->kmem;
2002 cq_buf->hw = dev->hw;
2003 memcpy(&cq_buf->cq_uk, &iwcq->sc_cq.cq_uk, sizeof(cq_buf->cq_uk));
2004 INIT_WORK(&cq_buf->work, irdma_free_cqbuf);
2005 list_add_tail(&cq_buf->list, &iwcq->resize_list);
2020 kfree(cq_buf);
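
Lines 1898-2020 are the resize path: the wrapper is allocated up front (1970), and only once the new ring is in place does it record the old kernel buffer, the hardware handle, and a copy of the software CQ state (cq_uk), then join the resize_list (2000-2005); on a failed resize it is simply kfree'd (2020). A sketch of that ordering follows; issue_resize_command() is a hypothetical placeholder for the real hardware step, and the old_va/old_pa/old_size parameters stand in for the saved kmem buffer.

	/* Hypothetical placeholder for the hardware CQ-resize command. */
	static int issue_resize_command(void)
	{
		return 0;	/* assume the resize succeeded */
	}

	static int park_old_cq_ring(struct device *dev, struct list_head *resize_list,
				    void *old_va, dma_addr_t old_pa, size_t old_size)
	{
		struct old_cq_buf *buf;
		int err;

		/* Allocate before touching hardware so the success path cannot fail. */
		buf = kzalloc(sizeof(*buf), GFP_KERNEL);
		if (!buf)
			return -ENOMEM;

		err = issue_resize_command();
		if (err)
			goto err_free;

		/* The new ring is live; remember the old one for deferred free. */
		buf->dev = dev;
		buf->va = old_va;
		buf->pa = old_pa;
		buf->size = old_size;
		INIT_WORK(&buf->work, old_cq_buf_free);
		list_add_tail(&buf->list, resize_list);
		return 0;

	err_free:
		kfree(buf);	/* mirrors the kfree(cq_buf) at line 2020 */
		return err;
	}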
3632 struct irdma_cq_buf *cq_buf;
3645 cq_buf = container_of(list_node, struct irdma_cq_buf, list);
3647 ret = irdma_poll_one(&cq_buf->cq_uk, cur_cqe, entry + npolled);
3665 last_buf = cq_buf;
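
Lines 3632-3665 are the poll path: completions posted before a resize still sit in the retired rings, so each buffer on the resize list is drained ahead of the live CQ, and last_buf remembers which retired buffer produced the most recent CQE so the cleanup pass above can skip it. In the sketch below, poll_one_retired() is a hypothetical stand-in for irdma_poll_one() run against the buffer's saved cq_uk snapshot; struct ib_wc is the standard RDMA work-completion type.

	#include <rdma/ib_verbs.h>

	/* Placeholder: pull one CQE from the buffer's saved ring state. */
	static int poll_one_retired(struct old_cq_buf *buf, struct ib_wc *entry)
	{
		return -ENOENT;	/* report the retired ring as empty */
	}

	static int drain_retired_cqs(struct list_head *resize_list,
				     struct ib_wc *entries, int num_entries,
				     struct old_cq_buf **last_buf)
	{
		struct old_cq_buf *buf;
		int npolled = 0;

		list_for_each_entry(buf, resize_list, list) {
			while (npolled < num_entries &&
			       !poll_one_retired(buf, &entries[npolled])) {
				npolled++;
				/*
				 * Remember which retired buffer produced the
				 * newest CQE; it must not be freed yet.
				 */
				*last_buf = buf;
			}
		}
		return npolled;
	}

After the poll completes, the caller would pass last_buf to the cleanup pass sketched earlier (the lcqe_buf parameter), which frees every other retired buffer through the workqueue.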