Lines Matching defs:phy

260 struct mt76_phy *phy = hw->priv;
262 if (skb == phy->test.tx_skb)
263 phy->test.tx_done++;
264 if (phy->test.tx_queued == phy->test.tx_done)
299 __mt76_tx_queue_skb(struct mt76_phy *phy, int qid, struct sk_buff *skb,
304 struct mt76_queue *q = phy->q_tx[qid];
305 struct mt76_dev *dev = phy->dev;
329 mt76_tx(struct mt76_phy *phy, struct ieee80211_sta *sta,
332 struct mt76_dev *dev = phy->dev;
338 if (mt76_testmode_enabled(phy)) {
339 ieee80211_free_txskb(phy->hw, skb);
359 info->hw_queue |= FIELD_PREP(MT_TX_HW_QUEUE_PHY, phy->band_idx);
360 q = phy->q_tx[qid];
363 __mt76_tx_queue_skb(phy, qid, skb, wcid, sta, NULL);
370 mt76_txq_dequeue(struct mt76_phy *phy, struct mt76_txq *mtxq)
376 skb = ieee80211_tx_dequeue(phy->hw, txq);
381 info->hw_queue |= FIELD_PREP(MT_TX_HW_QUEUE_PHY, phy->band_idx);
387 mt76_queue_ps_skb(struct mt76_phy *phy, struct ieee80211_sta *sta,
399 __mt76_tx_queue_skb(phy, MT_TXQ_PSD, skb, wcid, sta, NULL);
408 struct mt76_phy *phy = hw->priv;
409 struct mt76_dev *dev = phy->dev;
411 struct mt76_queue *hwq = phy->q_tx[MT_TXQ_PSD];
424 skb = mt76_txq_dequeue(phy, mtxq);
430 mt76_queue_ps_skb(phy, sta, last_skb, false);
437 mt76_queue_ps_skb(phy, sta, last_skb, true);
455 mt76_txq_send_burst(struct mt76_phy *phy, struct mt76_queue *q,
458 struct mt76_dev *dev = phy->dev;
473 skb = mt76_txq_dequeue(phy, mtxq);
483 idx = __mt76_tx_queue_skb(phy, qid, skb, wcid, txq->sta, &stop);
489 if (test_bit(MT76_RESET, &phy->state))
495 skb = mt76_txq_dequeue(phy, mtxq);
505 idx = __mt76_tx_queue_skb(phy, qid, skb, wcid, txq->sta, &stop);
521 mt76_txq_schedule_list(struct mt76_phy *phy, enum mt76_txq_id qid)
523 struct mt76_queue *q = phy->q_tx[qid];
524 struct mt76_dev *dev = phy->dev;
533 if (test_bit(MT76_RESET, &phy->state))
541 txq = ieee80211_next_txq(phy->hw, qid);
562 n_frames = mt76_txq_send_burst(phy, q, mtxq, wcid);
564 ieee80211_return_txq(phy->hw, txq, false);
575 void mt76_txq_schedule(struct mt76_phy *phy, enum mt76_txq_id qid)
586 ieee80211_txq_schedule_start(phy->hw, qid);
587 len = mt76_txq_schedule_list(phy, qid);
588 ieee80211_txq_schedule_end(phy->hw, qid);
596 void mt76_txq_schedule_all(struct mt76_phy *phy)
601 mt76_txq_schedule(phy, i);
607 struct mt76_phy *phy;
611 phy = dev->phys[i];
612 if (!phy)
615 mt76_txq_schedule_all(phy);
620 phy = dev->phys[i];
621 if (!phy || !phy->test.tx_pending)
624 mt76_testmode_tx_pending(phy);
637 void mt76_stop_tx_queues(struct mt76_phy *phy, struct ieee80211_sta *sta,
650 hwq = phy->q_tx[mt76_txq_get_qid(txq)];
662 struct mt76_phy *phy = hw->priv;
663 struct mt76_dev *dev = phy->dev;
665 if (!test_bit(MT76_STATE_RUNNING, &phy->state))
728 struct mt76_phy *phy = &dev->phy;
729 struct mt76_queue *q = phy->q_tx[0];
736 phy = dev->phys[MT_BAND1];
737 if (phy) {
738 q = phy->q_tx[0];
741 phy = dev->phys[MT_BAND2];
742 if (phy) {
743 q = phy->q_tx[0];
815 dev->phy.q_tx[0]->blocked)
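
The same ownership walk keeps showing up in these matches: mac80211 hands the driver an ieee80211_hw whose priv points at a struct mt76_phy, the phy carries a back-pointer to the shared struct mt76_dev plus its per-queue q_tx[] array and band_idx, and dev->phys[] maps band indices back to phys. Below is a minimal user-space sketch of that shape, assuming simplified stand-in types; the real definitions live in mt76.h and carry far more state, and only the field names that actually appear in the listing are taken from the source.

/*
 * Simplified, user-space sketch of the ownership pattern repeated in the
 * matches above. Field names (dev, q_tx[], band_idx, phys[], hw->priv)
 * mirror the listing; everything else is a stand-in for illustration.
 */
#include <stdio.h>

#define MT_TXQ_MAX  10   /* stand-in for the real per-AC/PSD queue count */
#define MT_BAND_MAX 3    /* MT_BAND0..MT_BAND2 as seen in the listing */

struct mt76_queue { int queued; };
struct mt76_dev;

/* one mt76_phy per radio/band; mac80211's hw->priv points at it */
struct mt76_phy {
	struct mt76_dev *dev;                 /* back-pointer to the shared device */
	struct mt76_queue *q_tx[MT_TXQ_MAX];  /* per-queue TX rings, phy->q_tx[qid] */
	int band_idx;                         /* feeds MT_TX_HW_QUEUE_PHY in the TX info */
};

/* one mt76_dev shared by all bands; dev->phys[] maps band index -> phy */
struct mt76_dev {
	struct mt76_phy phy;                  /* primary phy, &dev->phy */
	struct mt76_phy *phys[MT_BAND_MAX];   /* per-band phys, may be NULL */
};

struct ieee80211_hw { void *priv; };      /* stand-in for the mac80211 handle */

/* the hw->priv -> phy -> dev -> queue walk seen at lines 260/408/662/728 above */
static void show_walk(struct ieee80211_hw *hw, int qid)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_queue *q = phy->q_tx[qid];

	printf("band %d, dev %p, queue depth %d\n",
	       phy->band_idx, (void *)dev, q ? q->queued : 0);
}

int main(void)
{
	static struct mt76_queue q0;
	static struct mt76_dev dev;
	struct ieee80211_hw hw = { .priv = &dev.phy };

	dev.phy.dev = &dev;
	dev.phy.q_tx[0] = &q0;
	dev.phys[0] = &dev.phy;               /* band 0 is the primary phy */

	show_walk(&hw, 0);
	return 0;
}

Keeping per-band state in mt76_phy while the shared machinery stays in mt76_dev is why most helpers in the listing take a phy argument and reach everything else through phy->dev.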