Lines matching defs:tpd (transmit packet descriptor handling in drivers/atm/he.c)

1423 	he_dev->tpd_pool = dma_pool_create("tpd", &he_dev->pci_dev->dev,
1426 		hprintk("unable to create tpd dma_pool\n");
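
Lines 1423-1426 set up the DMA pool that every other match draws from. A minimal sketch of that setup; the size/alignment arguments (the elided lines 1424-1425) are assumptions based on the standard dma_pool_create() signature, and the error return is assumed:

	he_dev->tpd_pool = dma_pool_create("tpd", &he_dev->pci_dev->dev,
					   sizeof(struct he_tpd),	/* assumed */
					   TPD_ALIGNMENT, 0);		/* assumed */
	if (he_dev->tpd_pool == NULL) {
		hprintk("unable to create tpd dma_pool\n");
		return -ENOMEM;		/* assumed error path */
	}

Every descriptor allocated from this pool comes back with both a kernel virtual address and a bus address, which is what lets line 1611 below hand the hardware a DMA-able tpd without a per-packet allocation.
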
1608 	struct he_tpd *tpd;
1611 	tpd = dma_pool_alloc(he_dev->tpd_pool, GFP_ATOMIC, &mapping);
1612 	if (tpd == NULL)
1615 	tpd->status = TPD_ADDR(mapping);
1616 	tpd->reserved = 0;
1617 	tpd->iovec[0].addr = 0; tpd->iovec[0].len = 0;
1618 	tpd->iovec[1].addr = 0; tpd->iovec[1].len = 0;
1619 	tpd->iovec[2].addr = 0; tpd->iovec[2].len = 0;
1621 	return tpd;
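
Taken together, 1608-1621 reconstruct the descriptor allocator almost in full. A sketch, assuming the function is the __alloc_tpd() seen at the call sites below and that mapping is a local dma_addr_t:

	static struct he_tpd *__alloc_tpd(struct he_dev *he_dev)
	{
		struct he_tpd *tpd;
		dma_addr_t mapping;

		/* callers hold the device lock, hence GFP_ATOMIC */
		tpd = dma_pool_alloc(he_dev->tpd_pool, GFP_ATOMIC, &mapping);
		if (tpd == NULL)
			return NULL;

		/* fold the bus address into the status word; TPD_ADDR()
		   recovers it later for dma_pool_free() (1862, 2097) */
		tpd->status = TPD_ADDR(mapping);
		tpd->reserved = 0;
		tpd->iovec[0].addr = 0; tpd->iovec[0].len = 0;
		tpd->iovec[1].addr = 0; tpd->iovec[1].len = 0;
		tpd->iovec[2].addr = 0; tpd->iovec[2].len = 0;

		return tpd;
	}

Storing TPD_ADDR(mapping) in status is the trick the whole lifecycle hangs on: the same word is later used both to match completions (1821) and to free the descriptor back to the pool.
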
1804 	struct he_tpd *tpd;
1818 		tpd = NULL;
1821 				tpd = __tpd;
1827 		if (tpd == NULL) {
1828 			hprintk("unable to locate tpd for dma buffer %x\n",
1835 				he_mkcid(he_dev, tpd->vcc->vpi, tpd->vcc->vci));
1836 			if (tpd->vcc)
1837 				wake_up(&HE_VCC(tpd->vcc)->tx_waitq);
1843 			if (tpd->iovec[slot].addr)
1845 					tpd->iovec[slot].addr,
1846 					tpd->iovec[slot].len & TPD_LEN_MASK,
1848 			if (tpd->iovec[slot].len & TPD_LST)
1853 		if (tpd->skb) { /* && !TBRQ_MULTIPLE(he_dev->tbrq_head) */
1854 			if (tpd->vcc && tpd->vcc->pop)
1855 				tpd->vcc->pop(tpd->vcc, tpd->skb);
1857 				dev_kfree_skb_any(tpd->skb);
1861 		if (tpd)
1862 			dma_pool_free(he_dev->tpd_pool, tpd, TPD_ADDR(tpd->status));
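
Lines 1804-1862 are the transmit-completion (TBRQ service) side. The flow they imply: find the finished descriptor on outstanding_tpds by bus address, short-circuit on an end-of-stream marker, unmap each scatter-gather slot, release the skb, and return the descriptor to the pool. A sketch of that loop body; the __tpd walk variable, the TBRQ_TPD/TBRQ_EOS accessors, the list_del(), and the next_tbrq_entry label are assumptions inferred from the matches:

	struct he_tpd *tpd = NULL, *__tpd;
	int slot;

	/* match the bus address the adapter reported against the list
	   of descriptors queued at 2103 */
	list_for_each_entry(__tpd, &he_dev->outstanding_tpds, entry) {
		if (TPD_ADDR(__tpd->status) == TBRQ_TPD(he_dev->tbrq_head)) {
			tpd = __tpd;
			list_del(&__tpd->entry);
			break;
		}
	}

	if (tpd == NULL) {
		hprintk("unable to locate tpd for dma buffer %x\n",
			TBRQ_TPD(he_dev->tbrq_head));
		goto next_tbrq_entry;
	}

	if (TBRQ_EOS(he_dev->tbrq_head)) {
		/* end-of-stream marker queued by the close path (2429):
		   nothing to unmap, just wake the waiter */
		if (tpd->vcc)
			wake_up(&HE_VCC(tpd->vcc)->tx_waitq);
		goto next_tbrq_entry;
	}

	/* unmap each filled scatter-gather slot; TPD_LST flags the last */
	for (slot = 0; slot < TPD_MAXIOV; ++slot) {
		if (tpd->iovec[slot].addr)
			dma_unmap_single(&he_dev->pci_dev->dev,
				tpd->iovec[slot].addr,
				tpd->iovec[slot].len & TPD_LEN_MASK,
				DMA_TO_DEVICE);
		if (tpd->iovec[slot].len & TPD_LST)
			break;
	}

	/* hand the skb back through the vcc's pop hook when present */
	if (tpd->skb) {
		if (tpd->vcc && tpd->vcc->pop)
			tpd->vcc->pop(tpd->vcc, tpd->skb);
		else
			dev_kfree_skb_any(tpd->skb);
	}

next_tbrq_entry:
	if (tpd)
		dma_pool_free(he_dev->tpd_pool, tpd, TPD_ADDR(tpd->status));
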
2050 __enqueue_tpd(struct he_dev *he_dev, struct he_tpd *tpd, unsigned cid)
2055 					tpd, cid, he_dev->tpdrq_tail);
2079 			 * push tpd onto a transmit backlog queue
2084 				if (tpd->iovec[slot].addr)
2086 						tpd->iovec[slot].addr,
2087 						tpd->iovec[slot].len & TPD_LEN_MASK,
2090 			if (tpd->skb) {
2091 				if (tpd->vcc->pop)
2092 					tpd->vcc->pop(tpd->vcc, tpd->skb);
2094 					dev_kfree_skb_any(tpd->skb);
2095 				atomic_inc(&tpd->vcc->stats->tx_err);
2097 			dma_pool_free(he_dev->tpd_pool, tpd, TPD_ADDR(tpd->status));
2103 	list_add_tail(&tpd->entry, &he_dev->outstanding_tpds);
2104 	he_dev->tpdrq_tail->tpd = TPD_ADDR(tpd->status);
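
Lines 2050-2104 cover queueing toward the hardware. The fragment at 2079 is part of a FIXME comment: there is no transmit backlog queue yet, so when the TPDRQ ring is full the descriptor is unmapped, its skb dropped as a tx error, and the tpd freed. A sketch of both outcomes; tpdrq_full() is a hypothetical stand-in for the ring head/tail test the listing elides:

	static void
	__enqueue_tpd(struct he_dev *he_dev, struct he_tpd *tpd, unsigned cid)
	{
		int slot;

		if (tpdrq_full(he_dev)) {	/* hypothetical helper; the
						   real full test is elided */
			/* FIXME (2079): should push onto a transmit backlog
			   queue; for now the packet is dropped */
			for (slot = 0; slot < TPD_MAXIOV; ++slot)
				if (tpd->iovec[slot].addr)
					dma_unmap_single(&he_dev->pci_dev->dev,
						tpd->iovec[slot].addr,
						tpd->iovec[slot].len & TPD_LEN_MASK,
						DMA_TO_DEVICE);
			if (tpd->skb) {
				if (tpd->vcc->pop)
					tpd->vcc->pop(tpd->vcc, tpd->skb);
				else
					dev_kfree_skb_any(tpd->skb);
				atomic_inc(&tpd->vcc->stats->tx_err);
			}
			dma_pool_free(he_dev->tpd_pool, tpd, TPD_ADDR(tpd->status));
			return;
		}

		/* normal path: remember the tpd for completion matching at
		   1821, then publish its bus address on the hardware ring */
		list_add_tail(&tpd->entry, &he_dev->outstanding_tpds);
		he_dev->tpdrq_tail->tpd = TPD_ADDR(tpd->status);
		he_dev->tpdrq_tail->cid = cid;	/* cid field assumed */
	}

The tail advance and the doorbell write that tell the adapter about the new entry follow at lines the listing omits.
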
2333 	struct he_tpd *tpd;
2424 		tpd = __alloc_tpd(he_dev);
2425 		if (tpd == NULL) {
2429 		tpd->status |= TPD_EOS | TPD_INT;
2430 		tpd->skb = NULL;
2431 		tpd->vcc = vcc;
2436 		__enqueue_tpd(he_dev, tpd, cid);
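
Lines 2333-2436 come from the connection-close path: an empty descriptor flagged TPD_EOS | TPD_INT is queued so the adapter signals when the VC's transmit stream has drained, and the EOS completion at 1836-1837 then wakes tx_waitq. A minimal sketch under those assumptions; the error branch at 2425 is elided, so the fail label is hypothetical:

	tpd = __alloc_tpd(he_dev);
	if (tpd == NULL)
		goto fail;			/* hypothetical label */

	tpd->status |= TPD_EOS | TPD_INT;	/* interrupt when drained */
	tpd->skb = NULL;			/* nothing for completion to free */
	tpd->vcc = vcc;

	__enqueue_tpd(he_dev, tpd, cid);
	/* then sleep on HE_VCC(vcc)->tx_waitq until the TBRQ handler
	   sees the EOS completion (1836-1837) */
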
2491 	struct he_tpd *tpd;
2524 	tpd = __alloc_tpd(he_dev);
2525 	if (tpd == NULL) {
2536 		tpd->status |= TPD_CELLTYPE(TPD_USERCELL);
2543 		tpd->status |= TPD_CELLTYPE(pti);
2545 			tpd->status |= TPD_CLP;
2551 	tpd->iovec[slot].addr = dma_map_single(&he_dev->pci_dev->dev, skb->data,
2553 	tpd->iovec[slot].len = skb_headlen(skb);
2559 		if (slot == TPD_MAXIOV) { /* queue tpd; start new tpd */
2560 			tpd->vcc = vcc;
2561 			tpd->skb = NULL; /* not the last fragment
2565 			__enqueue_tpd(he_dev, tpd, cid);
2566 			tpd = __alloc_tpd(he_dev);
2567 			if (tpd == NULL) {
2576 			tpd->status |= TPD_USERCELL;
2580 		tpd->iovec[slot].addr = skb_frag_dma_map(&he_dev->pci_dev->dev,
2582 		tpd->iovec[slot].len = skb_frag_size(frag);
2587 	tpd->iovec[slot - 1].len |= TPD_LST;
2589 	tpd->address0 = dma_map_single(&he_dev->pci_dev->dev, skb->data, skb->len, DMA_TO_DEVICE);
2590 	tpd->length0 = skb->len | TPD_LST;
2592 	tpd->status |= TPD_INT;
2594 	tpd->vcc = vcc;
2595 	tpd->skb = skb;
2599 	__enqueue_tpd(he_dev, tpd, cid);
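
The he_send matches (2491-2599) show the two transmit builds: a scatter-gather path that maps the skb's linear head plus each page fragment into iovec slots, chaining a fresh tpd whenever the TPD_MAXIOV slots run out, and a fallback single mapping via address0/length0 (2589-2590). A condensed sketch of the scatter-gather branch; the loop framing, the nr_frags walk, and the unwind on allocation failure are assumptions inferred from the matches:

	int slot = 0, i;

	/* slot 0: the linear part of the skb */
	tpd->iovec[slot].addr = dma_map_single(&he_dev->pci_dev->dev, skb->data,
					       skb_headlen(skb), DMA_TO_DEVICE);
	tpd->iovec[slot].len = skb_headlen(skb);
	++slot;

	for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) {
		skb_frag_t *frag = &skb_shinfo(skb)->frags[i];

		if (slot == TPD_MAXIOV) {	/* this tpd is full: queue it,
						   chain a fresh one */
			tpd->vcc = vcc;
			tpd->skb = NULL;	/* only the final tpd owns
						   the skb (2595) */
			__enqueue_tpd(he_dev, tpd, cid);

			tpd = __alloc_tpd(he_dev);
			if (tpd == NULL)
				return -ENOMEM;	/* unwind elided here */
			tpd->status |= TPD_USERCELL;
			slot = 0;
		}

		tpd->iovec[slot].addr = skb_frag_dma_map(&he_dev->pci_dev->dev,
					frag, 0, skb_frag_size(frag),
					DMA_TO_DEVICE);
		tpd->iovec[slot].len = skb_frag_size(frag);
		++slot;
	}

	/* mark the last filled slot so the completion loop (1848) knows
	   where to stop unmapping */
	tpd->iovec[slot - 1].len |= TPD_LST;

	tpd->status |= TPD_INT;		/* interrupt on completion */
	tpd->vcc = vcc;
	tpd->skb = skb;			/* freed by the TBRQ handler */
	__enqueue_tpd(he_dev, tpd, cid);

Only the final descriptor of a chain carries the skb, so the completion handler frees the buffer exactly once while still unmapping every slot of every chained tpd.
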