Lines matching refs: ah

97 static int ath5k_reset(struct ath5k_hw *ah, struct ieee80211_channel *chan,
201 static inline u64 ath5k_extend_tsf(struct ath5k_hw *ah, u32 rstamp)
203 u64 tsf = ath5k_hw_get_tsf64(ah);
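The two ath5k_extend_tsf() lines above hint at how a truncated hardware rx timestamp is merged back into the full 64-bit TSF read from the MAC. A minimal standalone sketch of that merge, assuming the rx stamp is the low 15 bits of the TSF (the real field width is hardware-specific):

#include <stdint.h>

/* Merge a truncated rx timestamp (assumed 15 bits) into the full TSF.
 * If the low TSF bits are already below the stamp, the counter has
 * wrapped past it, so step back one period before splicing. */
static uint64_t extend_tsf(uint64_t tsf, uint32_t rstamp)
{
	if ((tsf & 0x7fff) < rstamp)
		tsf -= 0x8000;
	return (tsf & ~0x7fffULL) | rstamp;
}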
234 struct ath5k_hw *ah = (struct ath5k_hw *) hw_priv;
235 return ath5k_hw_reg_read(ah, reg_offset);
240 struct ath5k_hw *ah = (struct ath5k_hw *) hw_priv;
241 ath5k_hw_reg_write(ah, val, reg_offset);
257 struct ath5k_hw *ah = hw->priv;
258 struct ath_regulatory *regulatory = ath5k_hw_regulatory(ah);
296 ath5k_setup_channels(struct ath5k_hw *ah, struct ieee80211_channel *channels,
314 ATH5K_WARN(ah, "bad mode, not copying channels\n");
331 if (!ath5k_channel_ok(ah, &channels[count]))
344 ath5k_setup_rate_idx(struct ath5k_hw *ah, struct ieee80211_supported_band *b)
349 ah->rate_idx[b->band][i] = -1;
352 ah->rate_idx[b->band][b->bitrates[i].hw_value] = i;
354 ah->rate_idx[b->band][b->bitrates[i].hw_value_short] = i;
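The ath5k_setup_rate_idx() lines above build a reverse map from hardware rate codes back to mac80211 bitrate indices, so rx descriptors can be translated with a single table lookup. A minimal standalone sketch of the same idea, with an assumed table size and simplified bitrate type:

#include <string.h>

#define MAX_HW_RATE 32	/* assumed table size for this sketch */

struct bitrate { int hw_value; int hw_value_short; };

/* rate_idx[hw_code] -> index into bitrates[], or -1 if the code is unused. */
static void setup_rate_idx(signed char rate_idx[MAX_HW_RATE],
			   const struct bitrate *bitrates, int n)
{
	int i;

	memset(rate_idx, -1, MAX_HW_RATE);
	for (i = 0; i < n; i++) {
		rate_idx[bitrates[i].hw_value] = i;
		if (bitrates[i].hw_value_short)
			rate_idx[bitrates[i].hw_value_short] = i;
	}
}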
361 struct ath5k_hw *ah = hw->priv;
366 BUILD_BUG_ON(ARRAY_SIZE(ah->sbands) < NUM_NL80211_BANDS);
367 max_c = ARRAY_SIZE(ah->channels);
370 sband = &ah->sbands[NL80211_BAND_2GHZ];
372 sband->bitrates = &ah->rates[NL80211_BAND_2GHZ][0];
374 if (test_bit(AR5K_MODE_11G, ah->ah_capabilities.cap_mode)) {
380 sband->channels = ah->channels;
381 sband->n_channels = ath5k_setup_channels(ah, sband->channels,
387 } else if (test_bit(AR5K_MODE_11B, ah->ah_capabilities.cap_mode)) {
397 if (ah->ah_version == AR5K_AR5211) {
406 sband->channels = ah->channels;
407 sband->n_channels = ath5k_setup_channels(ah, sband->channels,
414 ath5k_setup_rate_idx(ah, sband);
417 if (test_bit(AR5K_MODE_11A, ah->ah_capabilities.cap_mode)) {
418 sband = &ah->sbands[NL80211_BAND_5GHZ];
420 sband->bitrates = &ah->rates[NL80211_BAND_5GHZ][0];
426 sband->channels = &ah->channels[count_c];
427 sband->n_channels = ath5k_setup_channels(ah, sband->channels,
432 ath5k_setup_rate_idx(ah, sband);
434 ath5k_debug_dump_bands(ah);
444 * Called with ah->lock.
447 ath5k_chan_set(struct ath5k_hw *ah, struct cfg80211_chan_def *chandef)
449 ATH5K_DBG(ah, ATH5K_DEBUG_RESET,
451 ah->curchan->center_freq, chandef->chan->center_freq);
456 ah->ah_bwmode = AR5K_BWMODE_DEFAULT;
459 ah->ah_bwmode = AR5K_BWMODE_5MHZ;
462 ah->ah_bwmode = AR5K_BWMODE_10MHZ;
475 return ath5k_reset(ah, chandef->chan, true);
519 ath5k_update_bssid_mask_and_opmode(struct ath5k_hw *ah,
522 struct ath_common *common = ath5k_hw_common(ah);
542 ah->hw, IEEE80211_IFACE_ITER_RESUME_ALL,
544 memcpy(ah->bssidmask, iter_data.mask, ETH_ALEN);
546 ah->opmode = iter_data.opmode;
547 if (ah->opmode == NL80211_IFTYPE_UNSPECIFIED)
549 ah->opmode = NL80211_IFTYPE_STATION;
551 ath5k_hw_set_opmode(ah, ah->opmode);
552 ATH5K_DBG(ah, ATH5K_DEBUG_MODE, "mode setup opmode %d (%s)\n",
553 ah->opmode, ath_opmode_to_string(ah->opmode));
556 ath5k_hw_set_lladdr(ah, iter_data.active_mac);
558 if (ath5k_hw_hasbssidmask(ah))
559 ath5k_hw_set_bssid_mask(ah, ah->bssidmask);
567 ah->filter_flags |= AR5K_RX_FILTER_PROM;
570 rfilt = ah->filter_flags;
571 ath5k_hw_set_rx_filter(ah, rfilt);
572 ATH5K_DBG(ah, ATH5K_DEBUG_MODE, "RX filter 0x%x\n", rfilt);
576 ath5k_hw_to_driver_rix(struct ath5k_hw *ah, int hw_rix)
585 rix = ah->rate_idx[ah->curchan->band][hw_rix];
597 struct sk_buff *ath5k_rx_skb_alloc(struct ath5k_hw *ah, dma_addr_t *skb_addr)
599 struct ath_common *common = ath5k_hw_common(ah);
611 ATH5K_ERR(ah, "can't alloc skbuff of size %u\n",
616 *skb_addr = dma_map_single(ah->dev,
620 if (unlikely(dma_mapping_error(ah->dev, *skb_addr))) {
621 ATH5K_ERR(ah, "%s: DMA mapping failed\n", __func__);
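ath5k_rx_skb_alloc() above shows the streaming-DMA idiom for rx buffers: map the skb data, then validate the handle with dma_mapping_error() before handing it to the hardware. A minimal kernel-style sketch of just that mapping step (helper name and error handling are illustrative, not the driver's):

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/skbuff.h>

/* Map an rx skb for device writes; on failure the caller frees the skb. */
static int map_rx_skb(struct device *dev, struct sk_buff *skb,
		      unsigned int bufsize, dma_addr_t *addr)
{
	*addr = dma_map_single(dev, skb->data, bufsize, DMA_FROM_DEVICE);
	if (unlikely(dma_mapping_error(dev, *addr)))
		return -ENOMEM;
	return 0;
}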
629 ath5k_rxbuf_setup(struct ath5k_hw *ah, struct ath5k_buf *bf)
636 skb = ath5k_rx_skb_alloc(ah, &bf->skbaddr);
660 ret = ath5k_hw_setup_rx_desc(ah, ds, ah->common.rx_bufsize, 0);
662 ATH5K_ERR(ah, "%s: could not setup RX desc\n", __func__);
666 if (ah->rxlink != NULL)
667 *ah->rxlink = bf->daddr;
668 ah->rxlink = &ds->ds_link;
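The rxlink lines above (666-668) append a freshly prepared buffer to the rx descriptor chain: ah->rxlink remembers where the previous descriptor's ds_link field lives, so the new buffer's bus address can be patched in without walking the list. A standalone sketch of that bookkeeping, with hypothetical descriptor/buffer types standing in for the driver's:

#include <stddef.h>
#include <stdint.h>

struct rx_desc { uint32_t ds_link; /* bus address of the next descriptor */ };
struct rx_buf  { uint32_t daddr; struct rx_desc *desc; };

/* *linkp points at the previous descriptor's ds_link field, or is NULL
 * for the first buffer. Patch the new buffer in and advance the link. */
static void rx_chain_append(uint32_t **linkp, struct rx_buf *bf)
{
	if (*linkp != NULL)
		**linkp = bf->daddr;
	*linkp = &bf->desc->ds_link;
}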
732 ath5k_txbuf_setup(struct ath5k_hw *ah, struct ath5k_buf *bf,
751 bf->skbaddr = dma_map_single(ah->dev, skb->data, skb->len,
754 if (dma_mapping_error(ah->dev, bf->skbaddr))
760 rate = ath5k_get_rate(ah->hw, info, bf, 0);
772 hw_rate = ath5k_get_rate_hw_value(ah->hw, info, bf, 0);
777 * subtract ah->ah_txpower.txp_cck_ofdm_pwr_delta
785 cts_rate = ieee80211_get_rts_cts_rate(ah->hw, info)->hw_value;
786 duration = le16_to_cpu(ieee80211_rts_duration(ah->hw,
791 cts_rate = ieee80211_get_rts_cts_rate(ah->hw, info)->hw_value;
792 duration = le16_to_cpu(ieee80211_ctstoself_duration(ah->hw,
796 ret = ah->ah_setup_tx_desc(ah, ds, pktlen,
799 (ah->ah_txpower.txp_requested * 2),
801 bf->rates[0].count, keyidx, ah->ah_tx_ant, flags,
807 if (ah->ah_capabilities.cap_has_mrr_support) {
813 rate = ath5k_get_rate(ah->hw, info, bf, i);
817 mrr_rate[i] = ath5k_get_rate_hw_value(ah->hw, info, bf, i);
821 ath5k_hw_setup_mrr_tx_desc(ah, ds,
834 ath5k_hw_set_txdp(ah, txq->qnum, bf->daddr);
839 ath5k_hw_start_tx_dma(ah, txq->qnum);
844 dma_unmap_single(ah->dev, bf->skbaddr, skb->len, DMA_TO_DEVICE);
853 ath5k_desc_alloc(struct ath5k_hw *ah)
862 ah->desc_len = sizeof(struct ath5k_desc) *
865 ah->desc = dma_alloc_coherent(ah->dev, ah->desc_len,
866 &ah->desc_daddr, GFP_KERNEL);
867 if (ah->desc == NULL) {
868 ATH5K_ERR(ah, "can't allocate descriptors\n");
872 ds = ah->desc;
873 da = ah->desc_daddr;
874 ATH5K_DBG(ah, ATH5K_DEBUG_ANY, "DMA map: %p (%zu) -> %llx\n",
875 ds, ah->desc_len, (unsigned long long)ah->desc_daddr);
880 ATH5K_ERR(ah, "can't allocate bufptr\n");
884 ah->bufptr = bf;
886 INIT_LIST_HEAD(&ah->rxbuf);
890 list_add_tail(&bf->list, &ah->rxbuf);
893 INIT_LIST_HEAD(&ah->txbuf);
894 ah->txbuf_len = ATH_TXBUF;
898 list_add_tail(&bf->list, &ah->txbuf);
902 INIT_LIST_HEAD(&ah->bcbuf);
906 list_add_tail(&bf->list, &ah->bcbuf);
911 dma_free_coherent(ah->dev, ah->desc_len, ah->desc, ah->desc_daddr);
913 ah->desc = NULL;
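ath5k_desc_alloc() above places the whole descriptor array in one coherent DMA allocation (a shared CPU/device view), in contrast to the per-skb streaming mappings used elsewhere. A minimal kernel-style sketch of that allocation, with a hypothetical descriptor layout:

#include <linux/dma-mapping.h>
#include <linux/gfp.h>
#include <linux/types.h>

struct my_desc { u32 ds_link; u32 ds_data; };	/* hypothetical layout */

/* Allocate ndesc descriptors in one coherent block: *daddr receives the
 * bus address for the device, the return value is the CPU mapping. */
static struct my_desc *alloc_desc_ring(struct device *dev, int ndesc,
				       dma_addr_t *daddr)
{
	return dma_alloc_coherent(dev, ndesc * sizeof(struct my_desc),
				  daddr, GFP_KERNEL);
}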
918 ath5k_txbuf_free_skb(struct ath5k_hw *ah, struct ath5k_buf *bf)
923 dma_unmap_single(ah->dev, bf->skbaddr, bf->skb->len,
925 ieee80211_free_txskb(ah->hw, bf->skb);
932 ath5k_rxbuf_free_skb(struct ath5k_hw *ah, struct ath5k_buf *bf)
934 struct ath_common *common = ath5k_hw_common(ah);
939 dma_unmap_single(ah->dev, bf->skbaddr, common->rx_bufsize,
948 ath5k_desc_free(struct ath5k_hw *ah)
952 list_for_each_entry(bf, &ah->txbuf, list)
953 ath5k_txbuf_free_skb(ah, bf);
954 list_for_each_entry(bf, &ah->rxbuf, list)
955 ath5k_rxbuf_free_skb(ah, bf);
956 list_for_each_entry(bf, &ah->bcbuf, list)
957 ath5k_txbuf_free_skb(ah, bf);
960 dma_free_coherent(ah->dev, ah->desc_len, ah->desc, ah->desc_daddr);
961 ah->desc = NULL;
962 ah->desc_daddr = 0;
964 kfree(ah->bufptr);
965 ah->bufptr = NULL;
974 ath5k_txq_setup(struct ath5k_hw *ah,
1002 qnum = ath5k_hw_setup_tx_queue(ah, qtype, &qi);
1010 txq = &ah->txqs[qnum];
1022 return &ah->txqs[qnum];
1026 ath5k_beaconq_setup(struct ath5k_hw *ah)
1038 return ath5k_hw_setup_tx_queue(ah, AR5K_TX_QUEUE_BEACON, &qi);
1042 ath5k_beaconq_config(struct ath5k_hw *ah)
1047 ret = ath5k_hw_get_tx_queueprops(ah, ah->bhalq, &qi);
1051 if (ah->opmode == NL80211_IFTYPE_AP ||
1052 ah->opmode == NL80211_IFTYPE_MESH_POINT) {
1060 } else if (ah->opmode == NL80211_IFTYPE_ADHOC) {
1069 ATH5K_DBG(ah, ATH5K_DEBUG_BEACON,
1073 ret = ath5k_hw_set_tx_queueprops(ah, ah->bhalq, &qi);
1075 ATH5K_ERR(ah, "%s: unable to update parameters for beacon "
1079 ret = ath5k_hw_reset_tx_queue(ah, ah->bhalq); /* push to h/w */
1084 ret = ath5k_hw_get_tx_queueprops(ah, AR5K_TX_QUEUE_ID_CAB, &qi);
1088 qi.tqi_ready_time = (ah->bintval * 80) / 100;
1089 ret = ath5k_hw_set_tx_queueprops(ah, AR5K_TX_QUEUE_ID_CAB, &qi);
1093 ret = ath5k_hw_reset_tx_queue(ah, AR5K_TX_QUEUE_ID_CAB);
1101 * @ah: The &struct ath5k_hw
1110 ath5k_drain_tx_buffs(struct ath5k_hw *ah)
1116 for (i = 0; i < ARRAY_SIZE(ah->txqs); i++) {
1117 if (ah->txqs[i].setup) {
1118 txq = &ah->txqs[i];
1121 ath5k_debug_printtxbuf(ah, bf);
1123 ath5k_txbuf_free_skb(ah, bf);
1125 spin_lock(&ah->txbuflock);
1126 list_move_tail(&bf->list, &ah->txbuf);
1127 ah->txbuf_len++;
1129 spin_unlock(&ah->txbuflock);
1139 ath5k_txq_release(struct ath5k_hw *ah)
1141 struct ath5k_txq *txq = ah->txqs;
1144 for (i = 0; i < ARRAY_SIZE(ah->txqs); i++, txq++)
1146 ath5k_hw_release_tx_queue(ah, txq->qnum);
1160 ath5k_rx_start(struct ath5k_hw *ah)
1162 struct ath_common *common = ath5k_hw_common(ah);
1168 ATH5K_DBG(ah, ATH5K_DEBUG_RESET, "cachelsz %u rx_bufsize %u\n",
1171 spin_lock_bh(&ah->rxbuflock);
1172 ah->rxlink = NULL;
1173 list_for_each_entry(bf, &ah->rxbuf, list) {
1174 ret = ath5k_rxbuf_setup(ah, bf);
1176 spin_unlock_bh(&ah->rxbuflock);
1180 bf = list_first_entry(&ah->rxbuf, struct ath5k_buf, list);
1181 ath5k_hw_set_rxdp(ah, bf->daddr);
1182 spin_unlock_bh(&ah->rxbuflock);
1184 ath5k_hw_start_rx_dma(ah); /* enable recv descriptors */
1185 ath5k_update_bssid_mask_and_opmode(ah, NULL); /* set filters, etc. */
1186 ath5k_hw_start_rx_pcu(ah); /* re-enable PCU/DMA engine */
1201 ath5k_rx_stop(struct ath5k_hw *ah)
1204 ath5k_hw_set_rx_filter(ah, 0); /* clear recv filter */
1205 ath5k_hw_stop_rx_pcu(ah); /* disable PCU */
1207 ath5k_debug_printrxbuffs(ah);
1211 ath5k_rx_decrypted(struct ath5k_hw *ah, struct sk_buff *skb,
1214 struct ath_common *common = ath5k_hw_common(ah);
1240 ath5k_check_ibss_tsf(struct ath5k_hw *ah, struct sk_buff *skb,
1253 tsf = ath5k_hw_get_tsf64(ah);
1257 ATH5K_DBG_UNLIMIT(ah, ATH5K_DEBUG_BEACON,
1276 ATH5K_DBG_UNLIMIT(ah, ATH5K_DEBUG_BEACON,
1289 if (hw_tu >= ah->nexttbtt)
1290 ath5k_beacon_update_timers(ah, bc_tstamp);
1295 if (!ath5k_hw_check_beacon_timers(ah, ah->bintval)) {
1296 ath5k_beacon_update_timers(ah, bc_tstamp);
1297 ATH5K_DBG_UNLIMIT(ah, ATH5K_DEBUG_BEACON,
1371 ath5k_receive_frame(struct ath5k_hw *ah, struct sk_buff *skb,
1375 struct ath_common *common = ath5k_hw_common(ah);
1397 rxs->mactime = ath5k_extend_tsf(ah, rs->rs_tstamp);
1400 rxs->freq = ah->curchan->center_freq;
1401 rxs->band = ah->curchan->band;
1403 rxs->signal = ah->ah_noise_floor + rs->rs_rssi;
1408 ah->stats.antenna_rx[rs->rs_antenna]++;
1410 ah->stats.antenna_rx[0]++; /* invalid */
1412 rxs->rate_idx = ath5k_hw_to_driver_rix(ah, rs->rs_rate);
1413 rxs->flag |= ath5k_rx_decrypted(ah, skb, rs);
1414 switch (ah->ah_bwmode) {
1426 ah->sbands[ah->curchan->band].bitrates[rxs->rate_idx].hw_value_short)
1429 trace_ath5k_rx(ah, skb);
1432 ewma_beacon_rssi_add(&ah->ah_beacon_rssi_avg, rs->rs_rssi);
1435 if (ah->opmode == NL80211_IFTYPE_ADHOC)
1436 ath5k_check_ibss_tsf(ah, skb, rxs);
1439 ieee80211_rx(ah->hw, skb);
1448 ath5k_receive_frame_ok(struct ath5k_hw *ah, struct ath5k_rx_status *rs)
1450 ah->stats.rx_all_count++;
1451 ah->stats.rx_bytes_count += rs->rs_datalen;
1457 ah->stats.rxerr_crc++;
1459 ah->stats.rxerr_fifo++;
1461 ah->stats.rxerr_phy++;
1463 ah->stats.rxerr_phy_code[rs->rs_phyerr]++;
1490 ah->stats.rxerr_decrypt++;
1496 ah->stats.rxerr_mic++;
1505 if (ah->fif_filter_flags & FIF_FCSFAIL)
1513 ah->stats.rxerr_jumbo++;
1520 ath5k_set_current_imask(struct ath5k_hw *ah)
1525 if (test_bit(ATH_STAT_RESET, ah->status))
1528 spin_lock_irqsave(&ah->irqlock, flags);
1529 imask = ah->imask;
1530 if (ah->rx_pending)
1532 if (ah->tx_pending)
1534 ath5k_hw_set_imr(ah, imask);
1535 spin_unlock_irqrestore(&ah->irqlock, flags);
1544 struct ath5k_hw *ah = from_tasklet(ah, t, rxtq);
1545 struct ath_common *common = ath5k_hw_common(ah);
1550 spin_lock(&ah->rxbuflock);
1551 if (list_empty(&ah->rxbuf)) {
1552 ATH5K_WARN(ah, "empty rx buf pool\n");
1556 bf = list_first_entry(&ah->rxbuf, struct ath5k_buf, list);
1562 if (ath5k_hw_get_rxdp(ah) == bf->daddr)
1565 ret = ah->ah_proc_rx_desc(ah, ds, &rs);
1569 ATH5K_ERR(ah, "error in processing rx descriptor\n");
1570 ah->stats.rxerr_proc++;
1574 if (ath5k_receive_frame_ok(ah, &rs)) {
1575 next_skb = ath5k_rx_skb_alloc(ah, &next_skb_addr);
1584 dma_unmap_single(ah->dev, bf->skbaddr,
1590 ath5k_receive_frame(ah, skb, &rs);
1596 list_move_tail(&bf->list, &ah->rxbuf);
1597 } while (ath5k_rxbuf_setup(ah, bf) == 0);
1599 spin_unlock(&ah->rxbuflock);
1600 ah->rx_pending = false;
1601 ath5k_set_current_imask(ah);
1613 struct ath5k_hw *ah = hw->priv;
1618 trace_ath5k_tx(ah, skb, txq);
1626 ATH5K_ERR(ah, "tx hdrlen not %%4: not enough"
1635 spin_lock_irqsave(&ah->txbuflock, flags);
1636 if (list_empty(&ah->txbuf)) {
1637 ATH5K_ERR(ah, "no further txbuf available, dropping packet\n");
1638 spin_unlock_irqrestore(&ah->txbuflock, flags);
1642 bf = list_first_entry(&ah->txbuf, struct ath5k_buf, list);
1644 ah->txbuf_len--;
1645 if (list_empty(&ah->txbuf))
1647 spin_unlock_irqrestore(&ah->txbuflock, flags);
1651 if (ath5k_txbuf_setup(ah, bf, txq, padsize, control)) {
1653 spin_lock_irqsave(&ah->txbuflock, flags);
1654 list_add_tail(&bf->list, &ah->txbuf);
1655 ah->txbuf_len++;
1656 spin_unlock_irqrestore(&ah->txbuflock, flags);
1666 ath5k_tx_frame_completed(struct ath5k_hw *ah, struct sk_buff *skb,
1675 ah->stats.tx_all_count++;
1676 ah->stats.tx_bytes_count += skb->len;
1699 ah->stats.ack_fail++;
1702 ah->stats.txerr_filt++;
1705 ah->stats.txerr_retry++;
1707 ah->stats.txerr_fifo++;
1723 ah->stats.antenna_tx[ts->ts_antenna]++;
1725 ah->stats.antenna_tx[0]++; /* invalid */
1727 trace_ath5k_tx_complete(ah, skb, txq, ts);
1728 ieee80211_tx_status(ah->hw, skb);
1732 ath5k_tx_processq(struct ath5k_hw *ah, struct ath5k_txq *txq)
1749 ret = ah->ah_proc_tx_desc(ah, ds, &ts);
1753 ATH5K_ERR(ah,
1762 dma_unmap_single(ah->dev, bf->skbaddr, skb->len,
1764 ath5k_tx_frame_completed(ah, skb, txq, &ts, bf);
1773 if (ath5k_hw_get_txdp(ah, txq->qnum) != bf->daddr) {
1774 spin_lock(&ah->txbuflock);
1775 list_move_tail(&bf->list, &ah->txbuf);
1776 ah->txbuf_len++;
1778 spin_unlock(&ah->txbuflock);
1783 ieee80211_wake_queue(ah->hw, txq->qnum);
1790 struct ath5k_hw *ah = from_tasklet(ah, t, txtq);
1793 if (ah->txqs[i].setup && (ah->ah_txq_isr_txok_all & BIT(i)))
1794 ath5k_tx_processq(ah, &ah->txqs[i]);
1796 ah->tx_pending = false;
1797 ath5k_set_current_imask(ah);
1809 ath5k_beacon_setup(struct ath5k_hw *ah, struct ath5k_buf *bf)
1819 bf->skbaddr = dma_map_single(ah->dev, skb->data, skb->len,
1821 ATH5K_DBG(ah, ATH5K_DEBUG_BEACON, "skb %p [data %p len %u] "
1825 if (dma_mapping_error(ah->dev, bf->skbaddr)) {
1826 ATH5K_ERR(ah, "beacon DMA mapping failed\n");
1833 antenna = ah->ah_tx_ant;
1836 if (ah->opmode == NL80211_IFTYPE_ADHOC && ath5k_hw_hasveol(ah)) {
1860 if (ah->ah_ant_mode == AR5K_ANTMODE_SECTOR_AP)
1861 antenna = ah->bsent & 4 ? 2 : 1;
1865 * subtract ah->ah_txpower.txp_cck_ofdm_pwr_delta
1868 ret = ah->ah_setup_tx_desc(ah, ds, skb->len,
1871 (ah->ah_txpower.txp_requested * 2),
1872 ieee80211_get_tx_rate(ah->hw, info)->hw_value,
1880 dma_unmap_single(ah->dev, bf->skbaddr, skb->len, DMA_TO_DEVICE);
1895 struct ath5k_hw *ah = hw->priv;
1912 ath5k_txbuf_free_skb(ah, avf->bbuf);
1914 ret = ath5k_beacon_setup(ah, avf->bbuf);
1928 ath5k_beacon_send(struct ath5k_hw *ah)
1936 ATH5K_DBG_UNLIMIT(ah, ATH5K_DEBUG_BEACON, "in beacon_send\n");
1945 if (unlikely(ath5k_hw_num_tx_pending(ah, ah->bhalq) != 0)) {
1946 ah->bmisscount++;
1947 ATH5K_DBG(ah, ATH5K_DEBUG_BEACON,
1948 "missed %u consecutive beacons\n", ah->bmisscount);
1949 if (ah->bmisscount > 10) { /* NB: 10 is a guess */
1950 ATH5K_DBG(ah, ATH5K_DEBUG_BEACON,
1952 ah->bmisscount);
1953 ATH5K_DBG(ah, ATH5K_DEBUG_RESET,
1955 ieee80211_queue_work(ah->hw, &ah->reset_work);
1959 if (unlikely(ah->bmisscount != 0)) {
1960 ATH5K_DBG(ah, ATH5K_DEBUG_BEACON,
1962 ah->bmisscount);
1963 ah->bmisscount = 0;
1966 if ((ah->opmode == NL80211_IFTYPE_AP && ah->num_ap_vifs +
1967 ah->num_mesh_vifs > 1) ||
1968 ah->opmode == NL80211_IFTYPE_MESH_POINT) {
1969 u64 tsf = ath5k_hw_get_tsf64(ah);
1971 int slot = ((tsftu % ah->bintval) * ATH_BCBUF) / ah->bintval;
1972 vif = ah->bslot[(slot + 1) % ATH_BCBUF];
1973 ATH5K_DBG(ah, ATH5K_DEBUG_BEACON,
1975 (unsigned long long)tsf, tsftu, ah->bintval, slot, vif);
1977 vif = ah->bslot[0];
1990 if (unlikely(ath5k_hw_stop_beacon_queue(ah, ah->bhalq))) {
1991 ATH5K_WARN(ah, "beacon queue %u didn't start/stop ?\n", ah->bhalq);
1996 if (ah->opmode == NL80211_IFTYPE_AP ||
1997 ah->opmode == NL80211_IFTYPE_MESH_POINT) {
1998 err = ath5k_beacon_update(ah->hw, vif);
2003 if (unlikely(bf->skb == NULL || ah->opmode == NL80211_IFTYPE_STATION ||
2004 ah->opmode == NL80211_IFTYPE_MONITOR)) {
2005 ATH5K_WARN(ah, "bf=%p bf_skb=%p\n", bf, bf->skb);
2009 trace_ath5k_tx(ah, bf->skb, &ah->txqs[ah->bhalq]);
2011 ath5k_hw_set_txdp(ah, ah->bhalq, bf->daddr);
2012 ath5k_hw_start_tx_dma(ah, ah->bhalq);
2013 ATH5K_DBG(ah, ATH5K_DEBUG_BEACON, "TXDP[%u] = %llx (%p)\n",
2014 ah->bhalq, (unsigned long long)bf->daddr, bf->desc);
2016 skb = ieee80211_get_buffered_bc(ah->hw, vif);
2018 ath5k_tx_queue(ah->hw, skb, ah->cabq, NULL);
2020 if (ah->cabq->txq_len >= ah->cabq->txq_max)
2023 skb = ieee80211_get_buffered_bc(ah->hw, vif);
2026 ah->bsent++;
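The staggered-beacon arithmetic in ath5k_beacon_send() above (the tsf/tsftu/slot lines) maps the current TSF onto one of ATH_BCBUF beacon slots inside a beacon interval. A worked standalone sketch of that slot computation, assuming 1 TU = 1024 microseconds (i.e. TSF_TO_TU is a right shift by 10) and four slots:

#include <stdint.h>

#define BCBUF 4	/* number of beacon slots, as ATH_BCBUF in the driver */

/* Which beacon slot does this TSF fall into, for interval bintval (in TU)? */
static int beacon_slot(uint64_t tsf, unsigned int bintval)
{
	uint32_t tsftu = (uint32_t)(tsf >> 10);	/* TSF_TO_TU */

	return (int)(((tsftu % bintval) * BCBUF) / bintval);
}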
2032 * @ah: struct ath5k_hw pointer we are operating on
2046 ath5k_beacon_update_timers(struct ath5k_hw *ah, u64 bc_tsf)
2051 intval = ah->bintval & AR5K_BEACON_PERIOD;
2052 if (ah->opmode == NL80211_IFTYPE_AP && ah->num_ap_vifs
2053 + ah->num_mesh_vifs > 1) {
2056 ATH5K_WARN(ah, "intval %u is too low, min 15\n",
2066 hw_tsf = ath5k_hw_get_tsf64(ah);
2095 ATH5K_DBG_UNLIMIT(ah, ATH5K_DEBUG_BEACON,
2110 ah->nexttbtt = nexttbtt;
2113 ath5k_hw_init_beacon_timers(ah, nexttbtt, intval);
2120 ATH5K_DBG_UNLIMIT(ah, ATH5K_DEBUG_BEACON,
2123 ATH5K_DBG_UNLIMIT(ah, ATH5K_DEBUG_BEACON,
2126 ATH5K_DBG_UNLIMIT(ah, ATH5K_DEBUG_BEACON,
2129 ATH5K_DBG_UNLIMIT(ah, ATH5K_DEBUG_BEACON,
2133 ATH5K_DBG_UNLIMIT(ah, ATH5K_DEBUG_BEACON, "intval %u %s %s\n",
2142 * @ah: struct ath5k_hw pointer we are operating on
2148 ath5k_beacon_config(struct ath5k_hw *ah)
2150 spin_lock_bh(&ah->block);
2151 ah->bmisscount = 0;
2152 ah->imask &= ~(AR5K_INT_BMISS | AR5K_INT_SWBA);
2154 if (ah->enable_beacon) {
2162 ath5k_beaconq_config(ah);
2164 ah->imask |= AR5K_INT_SWBA;
2166 if (ah->opmode == NL80211_IFTYPE_ADHOC) {
2167 if (ath5k_hw_hasveol(ah))
2168 ath5k_beacon_send(ah);
2170 ath5k_beacon_update_timers(ah, -1);
2172 ath5k_hw_stop_beacon_queue(ah, ah->bhalq);
2175 ath5k_hw_set_imr(ah, ah->imask);
2176 spin_unlock_bh(&ah->block);
2181 struct ath5k_hw *ah = from_tasklet(ah, t, beacontq);
2191 if (ah->opmode == NL80211_IFTYPE_ADHOC) {
2193 u64 tsf = ath5k_hw_get_tsf64(ah);
2194 ah->nexttbtt += ah->bintval;
2195 ATH5K_DBG(ah, ATH5K_DEBUG_BEACON,
2198 ah->nexttbtt,
2202 spin_lock(&ah->block);
2203 ath5k_beacon_send(ah);
2204 spin_unlock(&ah->block);
2214 ath5k_intr_calibration_poll(struct ath5k_hw *ah)
2216 if (time_is_before_eq_jiffies(ah->ah_cal_next_ani) &&
2217 !(ah->ah_cal_mask & AR5K_CALIBRATION_FULL) &&
2218 !(ah->ah_cal_mask & AR5K_CALIBRATION_SHORT)) {
2222 ah->ah_cal_next_ani = jiffies +
2224 tasklet_schedule(&ah->ani_tasklet);
2226 } else if (time_is_before_eq_jiffies(ah->ah_cal_next_short) &&
2227 !(ah->ah_cal_mask & AR5K_CALIBRATION_FULL) &&
2228 !(ah->ah_cal_mask & AR5K_CALIBRATION_SHORT)) {
2237 ah->ah_cal_next_short = jiffies +
2239 ieee80211_queue_work(ah->hw, &ah->calib_work);
2243 * AR5K_REG_ENABLE_BITS(ah, AR5K_CR, AR5K_CR_SWI); */
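ath5k_intr_calibration_poll() above rearms jiffies-based deadlines (ah_cal_next_ani, ah_cal_next_short) and uses time_is_before_eq_jiffies() to decide when to kick the ANI tasklet or the calibration work. A minimal kernel-style sketch of that deadline pattern, with an illustrative interval parameter:

#include <linux/jiffies.h>
#include <linux/types.h>

/* Returns true (and rearms the deadline) once the interval has elapsed. */
static bool poll_deadline(unsigned long *next, unsigned int interval_ms)
{
	if (!time_is_before_eq_jiffies(*next))
		return false;
	*next = jiffies + msecs_to_jiffies(interval_ms);
	return true;
}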
2247 ath5k_schedule_rx(struct ath5k_hw *ah)
2249 ah->rx_pending = true;
2250 tasklet_schedule(&ah->rxtq);
2254 ath5k_schedule_tx(struct ath5k_hw *ah)
2256 ah->tx_pending = true;
2257 tasklet_schedule(&ah->txtq);
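ath5k_schedule_rx()/ath5k_schedule_tx() above set a pending flag before scheduling the tasklet, and ath5k_set_current_imask() (lines 1520-1535) recomputes the interrupt mask under irqlock with the RX/TX bits suppressed while that work is still outstanding, so the interrupt is not re-armed before the tasklet drains its queue. A minimal sketch of that handshake, with hypothetical mask bits and structure fields:

#include <linux/interrupt.h>
#include <linux/spinlock.h>
#include <linux/types.h>

#define INT_RX	0x1	/* hypothetical interrupt-mask bits */
#define INT_TX	0x2

struct softc {
	spinlock_t irqlock;
	u32 imask;
	bool rx_pending, tx_pending;
	struct tasklet_struct rxtq;
};

static void schedule_rx(struct softc *sc)
{
	sc->rx_pending = true;		/* keep RX masked until drained */
	tasklet_schedule(&sc->rxtq);
}

/* Recompute the mask the hardware should see: RX/TX interrupts stay
 * off while their tasklets still have queued work. */
static u32 current_imask(struct softc *sc)
{
	unsigned long flags;
	u32 imask;

	spin_lock_irqsave(&sc->irqlock, flags);
	imask = sc->imask;
	if (sc->rx_pending)
		imask &= ~INT_RX;
	if (sc->tx_pending)
		imask &= ~INT_TX;
	spin_unlock_irqrestore(&sc->irqlock, flags);

	return imask;
}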
2263 struct ath5k_hw *ah = dev_id;
2278 if (unlikely(test_bit(ATH_STAT_INVALID, ah->status) ||
2279 ((ath5k_get_bus_type(ah) != ATH_AHB) &&
2280 !ath5k_hw_is_intr_pending(ah))))
2285 ath5k_hw_get_isr(ah, &status); /* NB: clears IRQ too */
2287 ATH5K_DBG(ah, ATH5K_DEBUG_INTR, "status 0x%x/0x%x\n",
2288 status, ah->imask);
2299 ATH5K_DBG(ah, ATH5K_DEBUG_RESET,
2301 ieee80211_queue_work(ah->hw, &ah->reset_work);
2318 ah->stats.rxorn_intr++;
2320 if (ah->ah_mac_srev < AR5K_SREV_AR5212) {
2321 ATH5K_DBG(ah, ATH5K_DEBUG_RESET,
2323 ieee80211_queue_work(ah->hw, &ah->reset_work);
2325 ath5k_schedule_rx(ah);
2331 tasklet_hi_schedule(&ah->beacontq);
2341 ah->stats.rxeol_intr++;
2346 ath5k_hw_update_tx_triglevel(ah, true);
2350 ath5k_schedule_rx(ah);
2357 ath5k_schedule_tx(ah);
2365 ah->stats.mib_intr++;
2366 ath5k_hw_update_mib_counters(ah);
2367 ath5k_ani_mib_intr(ah);
2372 tasklet_schedule(&ah->rf_kill.toggleq);
2376 if (ath5k_get_bus_type(ah) == ATH_AHB)
2379 } while (ath5k_hw_is_intr_pending(ah) && --counter > 0);
2384 * NOTE: ah->(rx/tx)_pending are set when scheduling the tasklets
2387 if (ah->rx_pending || ah->tx_pending)
2388 ath5k_set_current_imask(ah);
2391 ATH5K_WARN(ah, "too many interrupts, giving up for now\n");
2394 ath5k_intr_calibration_poll(ah);
2406 struct ath5k_hw *ah = container_of(work, struct ath5k_hw,
2410 if (time_is_before_eq_jiffies(ah->ah_cal_next_full)) {
2412 ah->ah_cal_next_full = jiffies +
2414 ah->ah_cal_mask |= AR5K_CALIBRATION_FULL;
2416 ATH5K_DBG(ah, ATH5K_DEBUG_CALIBRATE,
2419 if (ath5k_hw_gainf_calibrate(ah) == AR5K_RFGAIN_NEED_CHANGE) {
2424 ATH5K_DBG(ah, ATH5K_DEBUG_RESET,
2426 ieee80211_queue_work(ah->hw, &ah->reset_work);
2429 ah->ah_cal_mask |= AR5K_CALIBRATION_SHORT;
2432 ATH5K_DBG(ah, ATH5K_DEBUG_CALIBRATE, "channel %u/%x\n",
2433 ieee80211_frequency_to_channel(ah->curchan->center_freq),
2434 ah->curchan->hw_value);
2436 if (ath5k_hw_phy_calibrate(ah, ah->curchan))
2437 ATH5K_ERR(ah, "calibration of channel %u failed\n",
2439 ah->curchan->center_freq));
2442 if (ah->ah_cal_mask & AR5K_CALIBRATION_FULL)
2443 ah->ah_cal_mask &= ~AR5K_CALIBRATION_FULL;
2444 else if (ah->ah_cal_mask & AR5K_CALIBRATION_SHORT)
2445 ah->ah_cal_mask &= ~AR5K_CALIBRATION_SHORT;
2452 struct ath5k_hw *ah = from_tasklet(ah, t, ani_tasklet);
2454 ah->ah_cal_mask |= AR5K_CALIBRATION_ANI;
2455 ath5k_ani_calibration(ah);
2456 ah->ah_cal_mask &= ~AR5K_CALIBRATION_ANI;
2463 struct ath5k_hw *ah = container_of(work, struct ath5k_hw,
2469 if (!test_bit(ATH_STAT_STARTED, ah->status))
2472 mutex_lock(&ah->lock);
2474 for (i = 0; i < ARRAY_SIZE(ah->txqs); i++) {
2475 if (ah->txqs[i].setup) {
2476 txq = &ah->txqs[i];
2480 ATH5K_DBG(ah, ATH5K_DEBUG_XMIT,
2496 ATH5K_DBG(ah, ATH5K_DEBUG_RESET,
2498 ath5k_reset(ah, NULL, true);
2501 mutex_unlock(&ah->lock);
2503 ieee80211_queue_delayed_work(ah->hw, &ah->tx_complete_work,
2529 ath5k_init_ah(struct ath5k_hw *ah, const struct ath_bus_ops *bus_ops)
2531 struct ieee80211_hw *hw = ah->hw;
2537 SET_IEEE80211_DEV(hw, ah->dev);
2571 __set_bit(ATH_STAT_INVALID, ah->status);
2573 ah->opmode = NL80211_IFTYPE_STATION;
2574 ah->bintval = 1000;
2575 mutex_init(&ah->lock);
2576 spin_lock_init(&ah->rxbuflock);
2577 spin_lock_init(&ah->txbuflock);
2578 spin_lock_init(&ah->block);
2579 spin_lock_init(&ah->irqlock);
2582 ret = request_irq(ah->irq, ath5k_intr, IRQF_SHARED, "ath", ah);
2584 ATH5K_ERR(ah, "request_irq failed\n");
2588 common = ath5k_hw_common(ah);
2591 common->ah = ah;
2593 common->priv = ah;
2606 ret = ath5k_hw_init(ah);
2611 if (ah->ah_capabilities.cap_has_mrr_support) {
2624 ATH5K_INFO(ah, "Atheros AR%s chip found (MAC: 0x%x, PHY: 0x%x)\n",
2625 ath5k_chip_name(AR5K_VERSION_MAC, ah->ah_mac_srev),
2626 ah->ah_mac_srev,
2627 ah->ah_phy_revision);
2629 if (!ah->ah_single_chip) {
2631 if (ah->ah_radio_5ghz_revision &&
2632 !ah->ah_radio_2ghz_revision) {
2635 ah->ah_capabilities.cap_mode)) {
2636 ATH5K_INFO(ah, "RF%s 2GHz radio found (0x%x)\n",
2638 ah->ah_radio_5ghz_revision),
2639 ah->ah_radio_5ghz_revision);
2643 ah->ah_capabilities.cap_mode)) {
2644 ATH5K_INFO(ah, "RF%s 5GHz radio found (0x%x)\n",
2646 ah->ah_radio_5ghz_revision),
2647 ah->ah_radio_5ghz_revision);
2650 ATH5K_INFO(ah, "RF%s multiband radio found"
2653 ah->ah_radio_5ghz_revision),
2654 ah->ah_radio_5ghz_revision);
2659 else if (ah->ah_radio_5ghz_revision &&
2660 ah->ah_radio_2ghz_revision) {
2661 ATH5K_INFO(ah, "RF%s 5GHz radio found (0x%x)\n",
2663 ah->ah_radio_5ghz_revision),
2664 ah->ah_radio_5ghz_revision);
2665 ATH5K_INFO(ah, "RF%s 2GHz radio found (0x%x)\n",
2667 ah->ah_radio_2ghz_revision),
2668 ah->ah_radio_2ghz_revision);
2672 ath5k_debug_init_device(ah);
2675 __clear_bit(ATH_STAT_INVALID, ah->status);
2679 ath5k_hw_deinit(ah);
2681 free_irq(ah->irq, ah);
2687 ath5k_stop_locked(struct ath5k_hw *ah)
2690 ATH5K_DBG(ah, ATH5K_DEBUG_RESET, "invalid %u\n",
2691 test_bit(ATH_STAT_INVALID, ah->status));
2708 ieee80211_stop_queues(ah->hw);
2710 if (!test_bit(ATH_STAT_INVALID, ah->status)) {
2711 ath5k_led_off(ah);
2712 ath5k_hw_set_imr(ah, 0);
2713 synchronize_irq(ah->irq);
2714 ath5k_rx_stop(ah);
2715 ath5k_hw_dma_stop(ah);
2716 ath5k_drain_tx_buffs(ah);
2717 ath5k_hw_phy_disable(ah);
2725 struct ath5k_hw *ah = hw->priv;
2726 struct ath_common *common = ath5k_hw_common(ah);
2729 mutex_lock(&ah->lock);
2731 ATH5K_DBG(ah, ATH5K_DEBUG_RESET, "mode %d\n", ah->opmode);
2737 ath5k_stop_locked(ah);
2746 ah->curchan = ah->hw->conf.chandef.chan;
2747 ah->imask = AR5K_INT_RXOK
2757 ret = ath5k_reset(ah, NULL, false);
2762 ath5k_rfkill_hw_start(ah);
2773 ah->ah_ack_bitrate_high = true;
2775 for (i = 0; i < ARRAY_SIZE(ah->bslot); i++)
2776 ah->bslot[i] = NULL;
2780 mutex_unlock(&ah->lock);
2782 set_bit(ATH_STAT_STARTED, ah->status);
2783 ieee80211_queue_delayed_work(ah->hw, &ah->tx_complete_work,
2789 static void ath5k_stop_tasklets(struct ath5k_hw *ah)
2791 ah->rx_pending = false;
2792 ah->tx_pending = false;
2793 tasklet_kill(&ah->rxtq);
2794 tasklet_kill(&ah->txtq);
2795 tasklet_kill(&ah->beacontq);
2796 tasklet_kill(&ah->ani_tasklet);
2807 struct ath5k_hw *ah = hw->priv;
2810 mutex_lock(&ah->lock);
2811 ret = ath5k_stop_locked(ah);
2812 if (ret == 0 && !test_bit(ATH_STAT_INVALID, ah->status)) {
2833 ret = ath5k_hw_on_hold(ah);
2835 ATH5K_DBG(ah, ATH5K_DEBUG_RESET,
2839 mutex_unlock(&ah->lock);
2841 ath5k_stop_tasklets(ah);
2843 clear_bit(ATH_STAT_STARTED, ah->status);
2844 cancel_delayed_work_sync(&ah->tx_complete_work);
2847 ath5k_rfkill_hw_stop(ah);
2854 * This should be called with ah->lock.
2857 ath5k_reset(struct ath5k_hw *ah, struct ieee80211_channel *chan,
2860 struct ath_common *common = ath5k_hw_common(ah);
2864 ATH5K_DBG(ah, ATH5K_DEBUG_RESET, "resetting\n");
2866 __set_bit(ATH_STAT_RESET, ah->status);
2868 ath5k_hw_set_imr(ah, 0);
2869 synchronize_irq(ah->irq);
2870 ath5k_stop_tasklets(ah);
2875 ani_mode = ah->ani_state.ani_mode;
2876 ath5k_ani_init(ah, ATH5K_ANI_MODE_OFF);
2881 ath5k_drain_tx_buffs(ah);
2884 ath5k_hw_stop_rx_pcu(ah);
2891 ret = ath5k_hw_dma_stop(ah);
2897 ATH5K_DBG(ah, ATH5K_DEBUG_RESET,
2903 ah->curchan = chan;
2905 ret = ath5k_hw_reset(ah, ah->opmode, ah->curchan, fast, skip_pcu);
2907 ATH5K_ERR(ah, "can't reset hardware (%d)\n", ret);
2911 ret = ath5k_rx_start(ah);
2913 ATH5K_ERR(ah, "can't start recv logic\n");
2917 ath5k_ani_init(ah, ani_mode);
2930 ah->ah_cal_next_full = jiffies +
2932 ah->ah_cal_next_ani = jiffies +
2934 ah->ah_cal_next_short = jiffies +
2937 ewma_beacon_rssi_init(&ah->ah_beacon_rssi_avg);
2940 memset(&ah->survey, 0, sizeof(ah->survey));
2956 /* ath5k_chan_change(ah, c); */
2958 __clear_bit(ATH_STAT_RESET, ah->status);
2960 ath5k_beacon_config(ah);
2963 ieee80211_wake_queues(ah->hw);
2972 struct ath5k_hw *ah = container_of(work, struct ath5k_hw,
2975 mutex_lock(&ah->lock);
2976 ath5k_reset(ah, NULL, true);
2977 mutex_unlock(&ah->lock);
2984 struct ath5k_hw *ah = hw->priv;
2985 struct ath_regulatory *regulatory = ath5k_hw_regulatory(ah);
2999 ATH5K_ERR(ah, "can't get channels\n");
3006 ret = ath5k_desc_alloc(ah);
3008 ATH5K_ERR(ah, "can't allocate descriptors\n");
3018 ret = ath5k_beaconq_setup(ah);
3020 ATH5K_ERR(ah, "can't setup a beacon xmit queue\n");
3023 ah->bhalq = ret;
3024 ah->cabq = ath5k_txq_setup(ah, AR5K_TX_QUEUE_CAB, 0);
3025 if (IS_ERR(ah->cabq)) {
3026 ATH5K_ERR(ah, "can't setup cab queue\n");
3027 ret = PTR_ERR(ah->cabq);
3033 if (ah->ah_capabilities.cap_queues.q_tx_num >= 6) {
3036 txq = ath5k_txq_setup(ah, AR5K_TX_QUEUE_DATA, AR5K_WME_AC_VO);
3038 ATH5K_ERR(ah, "can't setup xmit queue\n");
3042 txq = ath5k_txq_setup(ah, AR5K_TX_QUEUE_DATA, AR5K_WME_AC_VI);
3044 ATH5K_ERR(ah, "can't setup xmit queue\n");
3048 txq = ath5k_txq_setup(ah, AR5K_TX_QUEUE_DATA, AR5K_WME_AC_BE);
3050 ATH5K_ERR(ah, "can't setup xmit queue\n");
3054 txq = ath5k_txq_setup(ah, AR5K_TX_QUEUE_DATA, AR5K_WME_AC_BK);
3056 ATH5K_ERR(ah, "can't setup xmit queue\n");
3063 txq = ath5k_txq_setup(ah, AR5K_TX_QUEUE_DATA, AR5K_WME_AC_BE);
3065 ATH5K_ERR(ah, "can't setup xmit queue\n");
3072 tasklet_setup(&ah->rxtq, ath5k_tasklet_rx);
3073 tasklet_setup(&ah->txtq, ath5k_tasklet_tx);
3074 tasklet_setup(&ah->beacontq, ath5k_tasklet_beacon);
3075 tasklet_setup(&ah->ani_tasklet, ath5k_tasklet_ani);
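The tasklet_setup() calls above pair with the from_tasklet() casts seen earlier in the rx/tx/beacon/ANI handlers ("struct ath5k_hw *ah = from_tasklet(ah, t, rxtq)" and friends): the callback receives the tasklet pointer and recovers the enclosing private structure via container_of. A minimal sketch of that pairing, using a hypothetical private struct:

#include <linux/interrupt.h>

struct softc {
	struct tasklet_struct rxtq;
	/* ... driver state ... */
};

/* Callback gets the tasklet pointer; from_tasklet() maps it back to the
 * structure that embeds it (a container_of wrapper). */
static void rx_tasklet(struct tasklet_struct *t)
{
	struct softc *sc = from_tasklet(sc, t, rxtq);

	/* process rx for sc ... */
	(void)sc;
}

static void init_tasklets(struct softc *sc)
{
	tasklet_setup(&sc->rxtq, rx_tasklet);
}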
3077 INIT_WORK(&ah->reset_work, ath5k_reset_work);
3078 INIT_WORK(&ah->calib_work, ath5k_calibrate_work);
3079 INIT_DELAYED_WORK(&ah->tx_complete_work, ath5k_tx_complete_poll_work);
3081 ret = ath5k_hw_common(ah)->bus_ops->eeprom_read_mac(ah, mac);
3083 ATH5K_ERR(ah, "unable to read address from EEPROM\n");
3089 ath5k_update_bssid_mask_and_opmode(ah, NULL);
3091 regulatory->current_rd = ah->ah_capabilities.cap_eeprom.ee_regdomain;
3094 ATH5K_ERR(ah, "can't initialize regulatory system\n");
3100 ATH5K_ERR(ah, "can't register ieee80211 hw\n");
3107 ath5k_init_leds(ah);
3109 ath5k_sysfs_register(ah);
3113 ath5k_txq_release(ah);
3115 ath5k_hw_release_tx_queue(ah, ah->bhalq);
3117 ath5k_desc_free(ah);
3123 ath5k_deinit_ah(struct ath5k_hw *ah)
3125 struct ieee80211_hw *hw = ah->hw;
3141 ath5k_desc_free(ah);
3142 ath5k_txq_release(ah);
3143 ath5k_hw_release_tx_queue(ah, ah->bhalq);
3144 ath5k_unregister_leds(ah);
3146 ath5k_sysfs_unregister(ah);
3152 ath5k_hw_deinit(ah);
3153 free_irq(ah->irq, ah);
3157 ath5k_any_vif_assoc(struct ath5k_hw *ah)
3166 ah->hw, IEEE80211_IFACE_ITER_RESUME_ALL,
3174 struct ath5k_hw *ah = hw->priv;
3176 rfilt = ath5k_hw_get_rx_filter(ah);
3181 ath5k_hw_set_rx_filter(ah, rfilt);
3182 ah->filter_flags = rfilt;
3185 void _ath5k_printk(const struct ath5k_hw *ah, const char *level,
3196 if (ah && ah->hw)
3198 level, wiphy_name(ah->hw->wiphy), &vaf);