Lines matching defs:tpd (drivers/atm/he.c, the ForeRunner HE ATM adapter driver)
1426 he_dev->tpd_pool = dma_pool_create("tpd", &he_dev->pci_dev->dev,
1429 hprintk("unable to create tpd dma_pool\n");
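
Lines 1426-1429 create the per-device DMA pool that backs every TPD (transmit packet descriptor). A minimal sketch of the dma_pool lifecycle those calls rely on; only the dma_pool_* functions are the real kernel API, while pdev and the surrounding flow are illustrative:

    #include <linux/dmapool.h>

    struct dma_pool *pool;
    struct he_tpd *tpd;
    dma_addr_t dma;

    pool = dma_pool_create("tpd", &pdev->dev,      /* pdev: assumed PCI device */
                           sizeof(struct he_tpd),  /* fixed object size */
                           TPD_ALIGNMENT, 0);      /* alignment; no boundary */
    if (!pool)
            return -ENOMEM;

    tpd = dma_pool_alloc(pool, GFP_ATOMIC, &dma);  /* dma receives the bus address */
    if (tpd)
            dma_pool_free(pool, tpd, dma);         /* free needs both addresses */
    dma_pool_destroy(pool);

The pool hands out fixed-size, TPD_ALIGNMENT-aligned objects together with their bus addresses, which is what makes the TPD_ADDR() trick below possible.
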
1611 struct he_tpd *tpd;
1614 tpd = dma_pool_alloc(he_dev->tpd_pool, GFP_ATOMIC, &mapping);
1615 if (tpd == NULL)
1618 tpd->status = TPD_ADDR(mapping);
1619 tpd->reserved = 0;
1620 tpd->iovec[0].addr = 0; tpd->iovec[0].len = 0;
1621 tpd->iovec[1].addr = 0; tpd->iovec[1].len = 0;
1622 tpd->iovec[2].addr = 0; tpd->iovec[2].len = 0;
1624 return tpd;
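
Lines 1611-1624 form the body of __alloc_tpd(). The allocator's one trick: the descriptor's own bus address is stored into tpd->status via TPD_ADDR(), so it can be recovered at free time (lines 1865 and 2100) without a separate field; pool alignment keeps the low status bits free for flags such as TPD_EOS and TPD_INT. Reconstructed as one function, where the mapping declaration and the NULL return are inferred from the matches:

    static struct he_tpd *
    __alloc_tpd(struct he_dev *he_dev)
    {
            struct he_tpd *tpd;
            dma_addr_t mapping;

            tpd = dma_pool_alloc(he_dev->tpd_pool, GFP_ATOMIC, &mapping);
            if (tpd == NULL)
                    return NULL;

            tpd->status = TPD_ADDR(mapping);   /* bus address doubles as status base */
            tpd->reserved = 0;
            tpd->iovec[0].addr = 0; tpd->iovec[0].len = 0;
            tpd->iovec[1].addr = 0; tpd->iovec[1].len = 0;
            tpd->iovec[2].addr = 0; tpd->iovec[2].len = 0;

            return tpd;
    }
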
1807 struct he_tpd *tpd;
1821 tpd = NULL;
1824 tpd = __tpd;
1830 if (tpd == NULL) {
1831 hprintk("unable to locate tpd for dma buffer %x\n",
1838 he_mkcid(he_dev, tpd->vcc->vpi, tpd->vcc->vci));
1839 if (tpd->vcc)
1840 wake_up(&HE_VCC(tpd->vcc)->tx_waitq);
1846 if (tpd->iovec[slot].addr)
1848 tpd->iovec[slot].addr,
1849 tpd->iovec[slot].len & TPD_LEN_MASK,
1851 if (tpd->iovec[slot].len & TPD_LST)
1856 if (tpd->skb) { /* && !TBRQ_MULTIPLE(he_dev->tbrq_head) */
1857 if (tpd->vcc && tpd->vcc->pop)
1858 tpd->vcc->pop(tpd->vcc, tpd->skb);
1860 dev_kfree_skb_any(tpd->skb);
1864 if (tpd)
1865 dma_pool_free(he_dev->tpd_pool, tpd, TPD_ADDR(tpd->status));
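
Lines 1807-1865 are the transmit-completion walk: the TPD matching the finished DMA buffer is looked up on outstanding_tpds (1821-1824), any closer sleeping on the VCC's tx_waitq is woken (1839-1840), each used iovec slot is unmapped until TPD_LST marks the last one (1846-1851), the skb is popped or freed exactly once (1856-1860), and the descriptor returns to the pool using TPD_ADDR(tpd->status) to recover its bus address (1864-1865). A sketch of the unmap loop, with the loop framing inferred from the matches:

    int slot;

    for (slot = 0; slot < TPD_MAXIOV; ++slot) {
            if (tpd->iovec[slot].addr)
                    dma_unmap_single(&he_dev->pci_dev->dev,
                                     tpd->iovec[slot].addr,
                                     tpd->iovec[slot].len & TPD_LEN_MASK,
                                     DMA_TO_DEVICE);
            if (tpd->iovec[slot].len & TPD_LST)
                    break;                     /* last fragment of this packet */
    }
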
2053 __enqueue_tpd(struct he_dev *he_dev, struct he_tpd *tpd, unsigned cid)
2058 tpd, cid, he_dev->tpdrq_tail);
2082 * push tpd onto a transmit backlog queue
2087 if (tpd->iovec[slot].addr)
2089 tpd->iovec[slot].addr,
2090 tpd->iovec[slot].len & TPD_LEN_MASK,
2093 if (tpd->skb) {
2094 if (tpd->vcc->pop)
2095 tpd->vcc->pop(tpd->vcc, tpd->skb);
2097 dev_kfree_skb_any(tpd->skb);
2098 atomic_inc(&tpd->vcc->stats->tx_err);
2100 dma_pool_free(he_dev->tpd_pool, tpd, TPD_ADDR(tpd->status));
2106 list_add_tail(&tpd->entry, &he_dev->outstanding_tpds);
2107 he_dev->tpdrq_tail->tpd = TPD_ADDR(tpd->status);
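
In __enqueue_tpd() (2053-2107) the comment at 2082 is aspirational: there is no transmit backlog queue, so when the TPDRQ ring is full, lines 2087-2100 unmap the slots, pop or free the skb, count a tx_err, and return the TPD to the pool; the PDU is simply dropped. On the normal path (2106-2107) the TPD is added to outstanding_tpds so the completion walk can find it, and its bus address goes into the ring tail. A sketch of that tail update; the cid store, barrier, and he_writel/TPDRQ_T doorbell are assumptions about the adjacent non-matching lines:

    list_add_tail(&tpd->entry, &he_dev->outstanding_tpds);
    he_dev->tpdrq_tail->tpd = TPD_ADDR(tpd->status);  /* bus address of this TPD */
    he_dev->tpdrq_tail->cid = cid;                    /* connection id */
    wmb();                                            /* ring entry before doorbell */
    he_writel(he_dev, TPDRQ_MASK(he_dev->tpdrq_tail + 1), TPDRQ_T);
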
2336 struct he_tpd *tpd;
2427 tpd = __alloc_tpd(he_dev);
2428 if (tpd == NULL) {
2432 tpd->status |= TPD_EOS | TPD_INT;
2433 tpd->skb = NULL;
2434 tpd->vcc = vcc;
2439 __enqueue_tpd(he_dev, tpd, cid);
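
The close path (2336-2439) flushes a connection by queueing a payload-less TPD flagged TPD_EOS | TPD_INT (2432-2434): the adapter interrupts once everything queued ahead of it on the connection has drained, and the completion walk then wakes tx_waitq (1840). A sketch; the error label and barrier are illustrative:

    tpd = __alloc_tpd(he_dev);
    if (tpd == NULL)
            goto close_failed;              /* illustrative error handling */

    tpd->status |= TPD_EOS | TPD_INT;       /* end of stream, interrupt when done */
    tpd->skb = NULL;                        /* control descriptor, no payload */
    tpd->vcc = vcc;
    wmb();
    __enqueue_tpd(he_dev, tpd, cid);
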
2494 struct he_tpd *tpd;
2527 tpd = __alloc_tpd(he_dev);
2528 if (tpd == NULL) {
2539 tpd->status |= TPD_CELLTYPE(TPD_USERCELL);
2546 tpd->status |= TPD_CELLTYPE(pti);
2548 tpd->status |= TPD_CLP;
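
Lines 2539-2548 choose the cell type per AAL: AAL5 traffic is tagged TPD_CELLTYPE(TPD_USERCELL), while raw cells reuse the PTI bits from the cell header and set TPD_CLP when the header carries cell-loss priority. A sketch of that branch; the header-byte extraction and the ATM_HDR_* masks are assumptions about the surrounding non-matching lines:

    if (vcc->qos.aal == ATM_AAL5)
            tpd->status |= TPD_CELLTYPE(TPD_USERCELL);
    else {
            char *pti_clp = (void *) (skb->data + 3); /* 4th header byte: PTI and CLP */
            int pti = (*pti_clp & ATM_HDR_PTI_MASK) >> ATM_HDR_PTI_SHIFT;

            tpd->status |= TPD_CELLTYPE(pti);
            if (*pti_clp & ATM_HDR_CLP)
                    tpd->status |= TPD_CLP;
    }
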
2554 tpd->iovec[slot].addr = dma_map_single(&he_dev->pci_dev->dev, skb->data,
2556 tpd->iovec[slot].len = skb_headlen(skb);
2562 if (slot == TPD_MAXIOV) { /* queue tpd; start new tpd */
2563 tpd->vcc = vcc;
2564 tpd->skb = NULL; /* not the last fragment, so don't ->push() yet */
2568 __enqueue_tpd(he_dev, tpd, cid);
2569 tpd = __alloc_tpd(he_dev);
2570 if (tpd == NULL) {
2579 tpd->status |= TPD_USERCELL;
2583 tpd->iovec[slot].addr = skb_frag_dma_map(&he_dev->pci_dev->dev,
2585 tpd->iovec[slot].len = skb_frag_size(frag);
2590 tpd->iovec[slot - 1].len |= TPD_LST;
2592 tpd->address0 = dma_map_single(&he_dev->pci_dev->dev, skb->data, skb->len, DMA_TO_DEVICE);
2593 tpd->length0 = skb->len | TPD_LST;
2595 tpd->status |= TPD_INT;
2597 tpd->vcc = vcc;
2598 tpd->skb = skb;
2602 __enqueue_tpd(he_dev, tpd, cid);
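
Lines 2494-2602 are the transmit path. With scatter-gather, the skb head is mapped into iovec slot 0 (2554-2556) and each page fragment into the next slot (2583-2585). When all TPD_MAXIOV slots fill mid-packet, the current TPD is queued with skb = NULL so its completion cannot free the skb early, and a fresh TPD tagged TPD_USERCELL continues the chain (2562-2579); TPD_LST marks the final fragment (2590). Without scatter-gather the whole skb maps linearly through address0/length0 (2592-2593). Only the last TPD carries TPD_INT plus the vcc and skb (2595-2598), so exactly one completion frees the packet. A sketch of the fill loop; the loop framing, variable declarations, and drop label are inferred:

    int i, slot = 0;

    tpd->iovec[slot].addr = dma_map_single(&he_dev->pci_dev->dev, skb->data,
                                           skb_headlen(skb), DMA_TO_DEVICE);
    tpd->iovec[slot].len = skb_headlen(skb);
    ++slot;

    for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) {
            skb_frag_t *frag = &skb_shinfo(skb)->frags[i];

            if (slot == TPD_MAXIOV) {       /* descriptor full: queue it, chain a new one */
                    tpd->vcc = vcc;
                    tpd->skb = NULL;        /* keep the skb alive past this TPD */
                    wmb();
                    __enqueue_tpd(he_dev, tpd, cid);

                    tpd = __alloc_tpd(he_dev);
                    if (tpd == NULL)
                            goto drop;      /* illustrative error handling */
                    tpd->status |= TPD_USERCELL;
                    slot = 0;
            }

            tpd->iovec[slot].addr = skb_frag_dma_map(&he_dev->pci_dev->dev, frag, 0,
                                                     skb_frag_size(frag), DMA_TO_DEVICE);
            tpd->iovec[slot].len = skb_frag_size(frag);
            ++slot;
    }

    tpd->iovec[slot - 1].len |= TPD_LST;    /* mark the packet's last fragment */
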