Lines Matching refs:tcb
137 bnad_txq_cleanup(struct bnad *bnad, struct bna_tcb *tcb)
139 struct bnad_tx_unmap *unmap_q = tcb->unmap_q;
143 for (i = 0; i < tcb->q_depth; i++) {
147 bnad_tx_buff_unmap(bnad, unmap_q, tcb->q_depth, i);
159 bnad_txcmpl_process(struct bnad *bnad, struct bna_tcb *tcb)
163 struct bnad_tx_unmap *unmap_q = tcb->unmap_q;
168 if (!test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags))
171 hw_cons = *(tcb->hw_consumer_index);
173 cons = tcb->consumer_index;
174 q_depth = tcb->q_depth;
177 BUG_ON(!(wis <= BNA_QE_IN_USE_CNT(tcb, tcb->q_depth)));
195 tcb->consumer_index = hw_cons;
197 tcb->txq->tx_packets += sent_packets;
198 tcb->txq->tx_bytes += sent_bytes;
204 bnad_tx_complete(struct bnad *bnad, struct bna_tcb *tcb)
209 if (test_and_set_bit(BNAD_TXQ_FREE_SENT, &tcb->flags))
212 sent = bnad_txcmpl_process(bnad, tcb);
216 BNA_QE_FREE_CNT(tcb, tcb->q_depth) >=
218 if (test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags)) {
225 if (likely(test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags)))
226 bna_ib_ack(tcb->i_dbell, sent);
229 clear_bit(BNAD_TXQ_FREE_SENT, &tcb->flags);
238 struct bna_tcb *tcb = (struct bna_tcb *)data;
239 struct bnad *bnad = tcb->bnad;
241 bnad_tx_complete(bnad, tcb);
800 struct bna_tcb *tcb = NULL;
827 tcb = bnad->tx_info[i].tcb[j];
828 if (tcb && test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags))
829 bnad_tx_complete(bnad, bnad->tx_info[i].tcb[j]);
953 struct bna_tcb *tcb =
954 bnad->tx_info[tx_id].tcb[tcb_id];
956 if (!tcb)
959 txq_id = tcb->id;
962 &tcb->flags)) {
999 bnad_cb_tcb_setup(struct bnad *bnad, struct bna_tcb *tcb)
1002 (struct bnad_tx_info *)tcb->txq->tx->priv;
1004 tcb->priv = tcb;
1005 tx_info->tcb[tcb->id] = tcb;
1009 bnad_cb_tcb_destroy(struct bnad *bnad, struct bna_tcb *tcb)
1012 (struct bnad_tx_info *)tcb->txq->tx->priv;
1014 tx_info->tcb[tcb->id] = NULL;
1015 tcb->priv = NULL;
1042 struct bna_tcb *tcb;
1047 tcb = tx_info->tcb[i];
1048 if (!tcb)
1050 txq_id = tcb->id;
1051 clear_bit(BNAD_TXQ_TX_STARTED, &tcb->flags);
1060 struct bna_tcb *tcb;
1065 tcb = tx_info->tcb[i];
1066 if (!tcb)
1068 txq_id = tcb->id;
1070 BUG_ON(test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags));
1071 set_bit(BNAD_TXQ_TX_STARTED, &tcb->flags);
1072 BUG_ON(*(tcb->hw_consumer_index) != 0);
1100 struct bna_tcb *tcb;
1105 tcb = tx_info->tcb[i];
1106 if (!tcb)
1109 bnad = tcb->bnad;
1111 if (test_and_set_bit(BNAD_TXQ_FREE_SENT, &tcb->flags)) {
1116 bnad_txq_cleanup(bnad, tcb);
1119 clear_bit(BNAD_TXQ_FREE_SENT, &tcb->flags);
1137 struct bna_tcb *tcb;
1141 tcb = tx_info->tcb[i];
1142 if (!tcb)
1517 if (tx_info->tcb[i] == NULL)
1520 vector_num = tx_info->tcb[i]->intr_vector;
1521 free_irq(bnad->msix_table[vector_num].vector, tx_info->tcb[i]);
1537 vector_num = tx_info->tcb[i]->intr_vector;
1538 sprintf(tx_info->tcb[i]->name, "%s TXQ %d", bnad->netdev->name,
1539 tx_id + tx_info->tcb[i]->id);
1542 tx_info->tcb[i]->name,
1543 tx_info->tcb[i]);
1926 if (tx_info->tcb[0]->intr_type == BNA_INTR_T_MSIX)
2412 if (bnad->tx_info[i].tcb[j]) {
2414 bnad->tx_info[i].tcb[j]->txq->tx_packets;
2416 bnad->tx_info[i].tcb[j]->txq->tx_bytes;
2803 bnad_txq_wi_prepare(struct bnad *bnad, struct bna_tcb *tcb,
2815 vlan_tag = ((tcb->priority & 0x7) << VLAN_PRIO_SHIFT)
2918 struct bna_tcb *tcb = NULL;
2946 tcb = bnad->tx_info[0].tcb[txq_id];
2952 if (unlikely(!tcb || !test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags))) {
2958 q_depth = tcb->q_depth;
2959 prod = tcb->producer_index;
2960 unmap_q = tcb->unmap_q;
2972 if (unlikely(wis > BNA_QE_FREE_CNT(tcb, q_depth))) {
2973 if ((*tcb->hw_consumer_index != tcb->consumer_index) &&
2974 !test_and_set_bit(BNAD_TXQ_FREE_SENT, &tcb->flags)) {
2976 sent = bnad_txcmpl_process(bnad, tcb);
2977 if (likely(test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags)))
2978 bna_ib_ack(tcb->i_dbell, sent);
2980 clear_bit(BNAD_TXQ_FREE_SENT, &tcb->flags);
2992 if (likely(wis > BNA_QE_FREE_CNT(tcb, q_depth))) {
3001 txqent = &((struct bna_txq_entry *)tcb->sw_q)[prod];
3005 if (bnad_txq_wi_prepare(bnad, tcb, skb, txqent)) {
3034 /* Undo the changes starting at tcb->producer_index */
3036 tcb->producer_index);
3048 txqent = &((struct bna_txq_entry *)tcb->sw_q)[prod];
3056 /* Undo the changes starting at tcb->producer_index */
3058 tcb->producer_index);
3073 /* Undo the changes starting at tcb->producer_index */
3074 bnad_tx_buff_unmap(bnad, unmap_q, q_depth, tcb->producer_index);
3081 tcb->producer_index = prod;
3085 if (unlikely(!test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags)))
3090 bna_txq_prod_indx_doorbell(tcb);
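
The matches above repeatedly combine `producer_index`, `consumer_index`, `hw_consumer_index` and `q_depth` with `BNA_QE_FREE_CNT()` / `BNA_QE_IN_USE_CNT()` (lines 177, 216, 2972, 2992) to decide how many work items fit before queueing and how many completions to retire. Below is a minimal user-space sketch of that bookkeeping, assuming the queues are power-of-two rings whose occupancy is computed modulo `q_depth`; the struct and helper names are illustrative stand-ins, not the driver's own definitions.

```c
#include <stdio.h>
#include <stdint.h>
#include <assert.h>

/* Illustrative stand-in for the producer/consumer ring state the tcb
 * fragments above manipulate; not the driver's struct bna_tcb. */
struct ring {
	uint32_t producer_index;   /* next slot the xmit path will fill  */
	uint32_t consumer_index;   /* next slot completion will retire   */
	uint32_t q_depth;          /* assumed to be a power of two       */
};

/* Assumed semantics of BNA_QE_IN_USE_CNT(): entries queued but not yet
 * completed, taken modulo q_depth so index wraparound is harmless. */
static uint32_t ring_in_use(const struct ring *r)
{
	return (r->producer_index - r->consumer_index) & (r->q_depth - 1);
}

/* Assumed semantics of BNA_QE_FREE_CNT(): slots still available to the
 * transmit path before it must stop the queue (cf. lines 2972/2992). */
static uint32_t ring_free(const struct ring *r)
{
	return (r->consumer_index - r->producer_index - 1) & (r->q_depth - 1);
}

int main(void)
{
	struct ring r = { .producer_index = 0, .consumer_index = 0, .q_depth = 64 };

	/* Transmit path: reserve 3 work items only if they fit, as the
	 * wis > BNA_QE_FREE_CNT() check does before queueing an skb. */
	uint32_t wis = 3;
	if (wis <= ring_free(&r))
		r.producer_index = (r.producer_index + wis) & (r.q_depth - 1);

	/* Completion path: hardware reports how far it has consumed
	 * (hw_consumer_index); retire everything up to that point. */
	uint32_t hw_cons = 2;
	uint32_t done = (hw_cons - r.consumer_index) & (r.q_depth - 1);
	assert(done <= ring_in_use(&r));        /* cf. BUG_ON() at line 177 */
	r.consumer_index = hw_cons;

	printf("in use: %u, free: %u\n", ring_in_use(&r), ring_free(&r));
	return 0;
}
```

The `- 1` in the free count keeps one slot permanently unused, so a full ring and an empty ring do not both report `producer_index == consumer_index`; the masking with `q_depth - 1` only works for power-of-two depths, which is the assumption stated above.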