Lines matching definitions and uses of: tid_agg_rx
1091 static inline bool ieee80211_rx_reorder_ready(struct tid_ampdu_rx *tid_agg_rx,
1094 struct sk_buff_head *frames = &tid_agg_rx->reorder_buf[index];
1098 if (tid_agg_rx->reorder_buf_filtered & BIT_ULL(index))
1112 struct tid_ampdu_rx *tid_agg_rx,
1116 struct sk_buff_head *skb_list = &tid_agg_rx->reorder_buf[index];
1120 lockdep_assert_held(&tid_agg_rx->reorder_lock);
1125 if (!ieee80211_rx_reorder_ready(tid_agg_rx, index)) {
1131 tid_agg_rx->stored_mpdu_num--;
1139 tid_agg_rx->reorder_buf_filtered &= ~BIT_ULL(index);
1140 tid_agg_rx->head_seq_num = ieee80211_sn_inc(tid_agg_rx->head_seq_num);
1144 struct tid_ampdu_rx *tid_agg_rx,
1150 lockdep_assert_held(&tid_agg_rx->reorder_lock);
1152 while (ieee80211_sn_less(tid_agg_rx->head_seq_num, head_seq_num)) {
1153 index = tid_agg_rx->head_seq_num % tid_agg_rx->buf_size;
1154 ieee80211_release_reorder_frame(sdata, tid_agg_rx, index,
1166 * Callers must hold tid_agg_rx->reorder_lock.
1171 struct tid_ampdu_rx *tid_agg_rx,
1176 lockdep_assert_held(&tid_agg_rx->reorder_lock);
1179 index = tid_agg_rx->head_seq_num % tid_agg_rx->buf_size;
1180 if (!ieee80211_rx_reorder_ready(tid_agg_rx, index) &&
1181 tid_agg_rx->stored_mpdu_num) {
1187 for (j = (index + 1) % tid_agg_rx->buf_size; j != index;
1188 j = (j + 1) % tid_agg_rx->buf_size) {
1189 if (!ieee80211_rx_reorder_ready(tid_agg_rx, j)) {
1194 !time_after(jiffies, tid_agg_rx->reorder_time[j] +
1199 for (i = (index + 1) % tid_agg_rx->buf_size; i != j;
1200 i = (i + 1) % tid_agg_rx->buf_size)
1201 __skb_queue_purge(&tid_agg_rx->reorder_buf[i]);
1205 ieee80211_release_reorder_frame(sdata, tid_agg_rx, j,
1211 tid_agg_rx->head_seq_num =
1212 (tid_agg_rx->head_seq_num +
1216 } else while (ieee80211_rx_reorder_ready(tid_agg_rx, index)) {
1217 ieee80211_release_reorder_frame(sdata, tid_agg_rx, index,
1219 index = tid_agg_rx->head_seq_num % tid_agg_rx->buf_size;
1222 if (tid_agg_rx->stored_mpdu_num) {
1223 j = index = tid_agg_rx->head_seq_num % tid_agg_rx->buf_size;
1225 for (; j != (index - 1) % tid_agg_rx->buf_size;
1226 j = (j + 1) % tid_agg_rx->buf_size) {
1227 if (ieee80211_rx_reorder_ready(tid_agg_rx, j))
1233 if (!tid_agg_rx->removed)
1234 mod_timer(&tid_agg_rx->reorder_timer,
1235 tid_agg_rx->reorder_time[j] + 1 +
1238 del_timer(&tid_agg_rx->reorder_timer);
1248 struct tid_ampdu_rx *tid_agg_rx,
1260 spin_lock(&tid_agg_rx->reorder_lock);
1266 if (unlikely(tid_agg_rx->auto_seq)) {
1267 tid_agg_rx->auto_seq = false;
1268 tid_agg_rx->ssn = mpdu_seq_num;
1269 tid_agg_rx->head_seq_num = mpdu_seq_num;
1272 buf_size = tid_agg_rx->buf_size;
1273 head_seq_num = tid_agg_rx->head_seq_num;
1279 if (unlikely(!tid_agg_rx->started)) {
1284 tid_agg_rx->started = true;
1301 ieee80211_release_reorder_frames(sdata, tid_agg_rx,
1307 index = mpdu_seq_num % tid_agg_rx->buf_size;
1310 if (ieee80211_rx_reorder_ready(tid_agg_rx, index)) {
1321 if (mpdu_seq_num == tid_agg_rx->head_seq_num &&
1322 tid_agg_rx->stored_mpdu_num == 0) {
1324 tid_agg_rx->head_seq_num =
1325 ieee80211_sn_inc(tid_agg_rx->head_seq_num);
1331 __skb_queue_tail(&tid_agg_rx->reorder_buf[index], skb);
1333 tid_agg_rx->reorder_time[index] = jiffies;
1334 tid_agg_rx->stored_mpdu_num++;
1335 ieee80211_sta_reorder_release(sdata, tid_agg_rx, frames);
1339 spin_unlock(&tid_agg_rx->reorder_lock);
1354 struct tid_ampdu_rx *tid_agg_rx;
1374 tid_agg_rx = rcu_dereference(sta->ampdu_mlme.tid_rx[tid]);
1375 if (!tid_agg_rx) {
1396 if (tid_agg_rx->timeout)
1397 tid_agg_rx->last_rx = jiffies;
1409 * RX packet at a time, and thus own tid_agg_rx. All
1414 if (ieee80211_sta_manage_reorder_buf(rx->sdata, tid_agg_rx, skb,
3062 struct tid_ampdu_rx *tid_agg_rx;
3092 tid_agg_rx = rcu_dereference(rx->sta->ampdu_mlme.tid_rx[tid]);
3093 if (!tid_agg_rx)
3102 if (tid_agg_rx->timeout)
3103 mod_timer(&tid_agg_rx->session_timer,
3104 TU_TO_EXP_TIME(tid_agg_rx->timeout));
3106 spin_lock(&tid_agg_rx->reorder_lock);
3108 ieee80211_release_reorder_frames(rx->sdata, tid_agg_rx,
3110 spin_unlock(&tid_agg_rx->reorder_lock);
3941 struct tid_ampdu_rx *tid_agg_rx;
3943 tid_agg_rx = rcu_dereference(sta->ampdu_mlme.tid_rx[tid]);
3944 if (!tid_agg_rx)
3949 spin_lock(&tid_agg_rx->reorder_lock);
3950 ieee80211_sta_reorder_release(sta->sdata, tid_agg_rx, &frames);
3951 spin_unlock(&tid_agg_rx->reorder_lock);
3970 struct tid_ampdu_rx *tid_agg_rx;
3991 tid_agg_rx = rcu_dereference(sta->ampdu_mlme.tid_rx[tid]);
3992 if (!tid_agg_rx)
3995 spin_lock_bh(&tid_agg_rx->reorder_lock);
4001 release = (tid_agg_rx->head_seq_num + tid_agg_rx->buf_size) %
4003 ieee80211_release_reorder_frames(sta->sdata, tid_agg_rx,
4006 tid_agg_rx->head_seq_num = ssn;
4008 ieee80211_release_reorder_frames(sta->sdata, tid_agg_rx, ssn,
4013 * it can be tid_agg_rx->buf_size behind and still be valid */
4014 diff = (tid_agg_rx->head_seq_num - ssn) & IEEE80211_SN_MASK;
4015 if (diff >= tid_agg_rx->buf_size) {
4016 tid_agg_rx->reorder_buf_filtered = 0;
4023 for (i = 0; i < tid_agg_rx->buf_size; i++) {
4024 int index = (ssn + i) % tid_agg_rx->buf_size;
4026 tid_agg_rx->reorder_buf_filtered &= ~BIT_ULL(index);
4028 tid_agg_rx->reorder_buf_filtered |= BIT_ULL(index);
4032 ieee80211_sta_reorder_release(sta->sdata, tid_agg_rx, &frames);
4035 spin_unlock_bh(&tid_agg_rx->reorder_lock);