Lines matching refs: tcb (struct bna_tcb)
137 bnad_txq_cleanup(struct bnad *bnad, struct bna_tcb *tcb)
139 struct bnad_tx_unmap *unmap_q = tcb->unmap_q;
143 for (i = 0; i < tcb->q_depth; i++) {
147 bnad_tx_buff_unmap(bnad, unmap_q, tcb->q_depth, i);
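The three matches above (137-147) are the shutdown path: it walks the per-TXQ unmap queue and releases any buffer the hardware never completed. A minimal standalone model of that scan, with stand-in types and a print in place of the driver's bnad_tx_buff_unmap()/skb free (everything here is illustrative, not the driver's code):

    #include <stddef.h>
    #include <stdio.h>

    struct tx_unmap { void *skb; };   /* stand-in for struct bnad_tx_unmap */

    static void cleanup_txq(struct tx_unmap *unmap_q, size_t q_depth)
    {
            for (size_t i = 0; i < q_depth; i++) {
                    if (!unmap_q[i].skb)
                            continue;          /* slot holds no pending buffer */
                    printf("freeing slot %zu\n", i);  /* unmap + free in the driver */
                    unmap_q[i].skb = NULL;
            }
    }

    int main(void)
    {
            struct tx_unmap q[4] = { { "a" }, { NULL }, { "c" }, { NULL } };
            cleanup_txq(q, 4);
            return 0;
    }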
159 bnad_txcmpl_process(struct bnad *bnad, struct bna_tcb *tcb)
163 struct bnad_tx_unmap *unmap_q = tcb->unmap_q;
168 if (!test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags))
171 hw_cons = *(tcb->hw_consumer_index);
173 cons = tcb->consumer_index;
174 q_depth = tcb->q_depth;
177 BUG_ON(!(wis <= BNA_QE_IN_USE_CNT(tcb, tcb->q_depth)));
195 tcb->consumer_index = hw_cons;
197 tcb->txq->tx_packets += sent_packets;
198 tcb->txq->tx_bytes += sent_bytes;
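Lines 159-198 are the completion handler: it snapshots the hardware consumer index, counts how many work items completed since the software consumer_index, sanity-checks that count against the in-use entries, then advances consumer_index and accumulates per-TXQ packet/byte counters. A standalone model of the index arithmetic; the macro bodies below are typical power-of-two ring math and are an assumption, not copied from the driver's BNA_QE_IN_USE_CNT/BNA_QE_FREE_CNT:

    #include <stdio.h>

    struct ring {
            unsigned int producer_index;
            unsigned int consumer_index;
            unsigned int q_depth;       /* must be a power of two */
    };

    #define QE_IN_USE_CNT(q, depth) \
            (((q)->producer_index - (q)->consumer_index) & ((depth) - 1))
    #define QE_FREE_CNT(q, depth) \
            (((q)->consumer_index - (q)->producer_index - 1) & ((depth) - 1))

    int main(void)
    {
            struct ring r = { .producer_index = 10, .consumer_index = 4,
                              .q_depth = 64 };
            unsigned int hw_cons = 8;   /* what *tcb->hw_consumer_index might read */
            unsigned int wis = (hw_cons - r.consumer_index) & (r.q_depth - 1);

            printf("in use %u, free %u, completed wis %u\n",
                   QE_IN_USE_CNT(&r, r.q_depth),
                   QE_FREE_CNT(&r, r.q_depth), wis);
            r.consumer_index = hw_cons;         /* mirrors line 195 */
            return 0;
    }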
204 bnad_tx_complete(struct bnad *bnad, struct bna_tcb *tcb)
209 if (test_and_set_bit(BNAD_TXQ_FREE_SENT, &tcb->flags))
212 sent = bnad_txcmpl_process(bnad, tcb);
216 BNA_QE_FREE_CNT(tcb, tcb->q_depth) >=
218 if (test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags)) {
225 if (likely(test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags)))
226 bna_ib_ack(tcb->i_dbell, sent);
229 clear_bit(BNAD_TXQ_FREE_SENT, &tcb->flags);
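Lines 204-229 show the lock-free handshake around descriptor reclaim: whichever context wins test_and_set_bit(BNAD_TXQ_FREE_SENT) runs bnad_txcmpl_process(), acks the interrupt block if the queue is still started, and clears the bit; a loser simply backs off. The same bit also gates the deferred cleanup at lines 1110-1118 and the opportunistic reclaim in the transmit path at lines 2970-2976. A minimal userspace model of that try-lock pattern using a C11 atomic flag (names are illustrative, not the driver's):

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdio.h>

    static atomic_flag free_sent = ATOMIC_FLAG_INIT;

    static bool try_reclaim(const char *who)
    {
            if (atomic_flag_test_and_set(&free_sent))
                    return false;               /* another path is reclaiming */
            printf("%s reclaims completed descriptors\n", who);
            atomic_flag_clear(&free_sent);      /* mirrors clear_bit() at line 229 */
            return true;
    }

    int main(void)
    {
            try_reclaim("completion handler");
            try_reclaim("xmit path");
            return 0;
    }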
238 struct bna_tcb *tcb = (struct bna_tcb *)data;
239 struct bnad *bnad = tcb->bnad;
241 bnad_tx_complete(bnad, tcb);
800 struct bna_tcb *tcb = NULL;
827 tcb = bnad->tx_info[i].tcb[j];
828 if (tcb && test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags))
829 bnad_tx_complete(bnad, bnad->tx_info[i].tcb[j]);
953 struct bna_tcb *tcb =
954 bnad->tx_info[tx_id].tcb[tcb_id];
956 if (!tcb)
959 txq_id = tcb->id;
962 &tcb->flags)) {
999 bnad_cb_tcb_setup(struct bnad *bnad, struct bna_tcb *tcb)
1002 (struct bnad_tx_info *)tcb->txq->tx->priv;
1004 tcb->priv = tcb;
1005 tx_info->tcb[tcb->id] = tcb;
1009 bnad_cb_tcb_destroy(struct bnad *bnad, struct bna_tcb *tcb)
1012 (struct bnad_tx_info *)tcb->txq->tx->priv;
1014 tx_info->tcb[tcb->id] = NULL;
1015 tcb->priv = NULL;
1041 struct bna_tcb *tcb;
1046 tcb = tx_info->tcb[i];
1047 if (!tcb)
1049 txq_id = tcb->id;
1050 clear_bit(BNAD_TXQ_TX_STARTED, &tcb->flags);
1059 struct bna_tcb *tcb;
1064 tcb = tx_info->tcb[i];
1065 if (!tcb)
1067 txq_id = tcb->id;
1069 BUG_ON(test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags));
1070 set_bit(BNAD_TXQ_TX_STARTED, &tcb->flags);
1071 BUG_ON(*(tcb->hw_consumer_index) != 0);
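Lines 1041-1071 are the stall/resume callbacks: stalling clears BNAD_TXQ_TX_STARTED for each tcb so the transmit path stops ringing the doorbell, and resuming asserts the bit was still clear, sets it again, and checks that the hardware consumer index has been reset to 0. A small model of gating work on such a started flag (the flag type and helpers are stand-ins):

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdio.h>
    #include <assert.h>

    static atomic_bool txq_started;

    static void tx_stall(void)  { atomic_store(&txq_started, false); }  /* line 1050 */

    static void tx_resume(void)                  /* lines 1069-1071 */
    {
            assert(!atomic_load(&txq_started));  /* stands in for the BUG_ON */
            atomic_store(&txq_started, true);
    }

    static void xmit(void)
    {
            if (!atomic_load(&txq_started)) {    /* mirrors the check at line 3081 */
                    puts("queue stalled: skip doorbell");
                    return;
            }
            puts("ring doorbell");
    }

    int main(void)
    {
            tx_resume();
            xmit();
            tx_stall();
            xmit();
            return 0;
    }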
1099 struct bna_tcb *tcb;
1104 tcb = tx_info->tcb[i];
1105 if (!tcb)
1108 bnad = tcb->bnad;
1110 if (test_and_set_bit(BNAD_TXQ_FREE_SENT, &tcb->flags)) {
1115 bnad_txq_cleanup(bnad, tcb);
1118 clear_bit(BNAD_TXQ_FREE_SENT, &tcb->flags);
1136 struct bna_tcb *tcb;
1140 tcb = tx_info->tcb[i];
1141 if (!tcb)
1516 if (tx_info->tcb[i] == NULL)
1519 vector_num = tx_info->tcb[i]->intr_vector;
1520 free_irq(bnad->msix_table[vector_num].vector, tx_info->tcb[i]);
1536 vector_num = tx_info->tcb[i]->intr_vector;
1537 sprintf(tx_info->tcb[i]->name, "%s TXQ %d", bnad->netdev->name,
1538 tx_id + tx_info->tcb[i]->id);
1541 tx_info->tcb[i]->name,
1542 tx_info->tcb[i]);
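Lines 1516-1542 cover MSI-X vector teardown and registration for the transmit queues: each tcb carries its own intr_vector, the per-vector IRQ name is built as "<netdev> TXQ <n>", and the tcb itself is passed as the IRQ cookie, which is why free_irq() at line 1520 and the handler at lines 238-241 both get the tcb back. A trivial standalone demo of the naming only; the buffer size and device name are made up:

    #include <stdio.h>

    int main(void)
    {
            char name[32];

            for (int id = 0; id < 3; id++) {
                    snprintf(name, sizeof(name), "%s TXQ %d", "eth0", id);
                    printf("%s\n", name);   /* "eth0 TXQ 0", "eth0 TXQ 1", ... */
            }
            return 0;
    }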
1924 if (tx_info->tcb[0]->intr_type == BNA_INTR_T_MSIX)
2410 if (bnad->tx_info[i].tcb[j]) {
2412 bnad->tx_info[i].tcb[j]->txq->tx_packets;
2414 bnad->tx_info[i].tcb[j]->txq->tx_bytes;
2801 bnad_txq_wi_prepare(struct bnad *bnad, struct bna_tcb *tcb,
2813 vlan_tag = ((tcb->priority & 0x7) << VLAN_PRIO_SHIFT)
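Line 2813, inside bnad_txq_wi_prepare(), builds the 802.1Q tag for the work item: the 3-bit queue priority is placed in the PCP field (bits 15:13; VLAN_PRIO_SHIFT is 13 in <linux/if_vlan.h>). The rest of the statement is cut off by the match, so the VLAN id OR-ed in below is an assumption about what the elided part does:

    #include <stdio.h>

    #define VLAN_PRIO_SHIFT 13

    int main(void)
    {
            unsigned int priority = 5, vlan_id = 100;
            unsigned int vlan_tag = ((priority & 0x7) << VLAN_PRIO_SHIFT) | vlan_id;

            printf("tag %#06x\n", vlan_tag);    /* 0xa064 */
            return 0;
    }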
2914 struct bna_tcb *tcb = NULL;
2942 tcb = bnad->tx_info[0].tcb[txq_id];
2948 if (unlikely(!tcb || !test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags))) {
2954 q_depth = tcb->q_depth;
2955 prod = tcb->producer_index;
2956 unmap_q = tcb->unmap_q;
2968 if (unlikely(wis > BNA_QE_FREE_CNT(tcb, q_depth))) {
2969 if ((*tcb->hw_consumer_index != tcb->consumer_index) &&
2970 !test_and_set_bit(BNAD_TXQ_FREE_SENT, &tcb->flags)) {
2972 sent = bnad_txcmpl_process(bnad, tcb);
2973 if (likely(test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags)))
2974 bna_ib_ack(tcb->i_dbell, sent);
2976 clear_bit(BNAD_TXQ_FREE_SENT, &tcb->flags);
2988 if (likely(wis > BNA_QE_FREE_CNT(tcb, q_depth))) {
2997 txqent = &((struct bna_txq_entry *)tcb->sw_q)[prod];
3001 if (bnad_txq_wi_prepare(bnad, tcb, skb, txqent)) {
3030 /* Undo the changes starting at tcb->producer_index */
3032 tcb->producer_index);
3044 txqent = &((struct bna_txq_entry *)tcb->sw_q)[prod];
3052 /* Undo the changes starting at tcb->producer_index */
3054 tcb->producer_index);
3069 /* Undo the changes starting at tcb->producer_index */
3070 bnad_tx_buff_unmap(bnad, unmap_q, q_depth, tcb->producer_index);
3077 tcb->producer_index = prod;
3081 if (unlikely(!test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags)))
3086 bna_txq_prod_indx_doorbell(tcb);
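Lines 2914-3086 are the hot transmit path, and the matches trace its ordering: look up the tcb for the queue, bail out if the queue is not started, compare the needed work items against BNA_QE_FREE_CNT(), opportunistically reclaim completions when short (reusing the FREE_SENT handshake), build the work items and map each fragment (undoing the mappings from producer_index on any failure, lines 3030-3070), publish the new producer_index, and only then ring the doorbell, and only if the queue is still started. A compact standalone sketch of that ordering with simplified stand-in types; none of these helpers are the driver's:

    #include <stdbool.h>
    #include <stdio.h>

    struct txq {
            unsigned int prod, cons, depth;     /* depth is a power of two */
            bool started;
    };

    static unsigned int free_cnt(const struct txq *q)
    {
            return (q->cons - q->prod - 1) & (q->depth - 1);
    }

    /* Returns false when the caller should stop the queue and retry later
     * (the driver's netif_stop_queue()/NETDEV_TX_BUSY path). */
    static bool xmit(struct txq *q, unsigned int wis)
    {
            if (wis > free_cnt(q)) {
                    /* opportunistic reclaim would run here (lines 2968-2976) */
                    if (wis > free_cnt(q))
                            return false;
            }
            /* ...fill 'wis' work items, mapping each fragment... */
            q->prod = (q->prod + wis) & (q->depth - 1);   /* mirrors line 3077 */
            if (q->started)
                    printf("doorbell: prod=%u\n", q->prod);  /* mirrors line 3086 */
            return true;
    }

    int main(void)
    {
            struct txq q = { .prod = 0, .cons = 0, .depth = 8, .started = true };
            xmit(&q, 3);
            return 0;
    }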