spin_lock_bh(&txq->axq_lock);
tid->paused = false;
- if (list_empty(&tid->buf_q))
+ if (skb_queue_empty(&tid->buf_q))
goto unlock;
ath_tx_queue_tid(txq, tid);
static void ath_tx_flush_tid(struct ath_softc *sc, struct ath_atx_tid *tid)
{
struct ath_txq *txq = tid->ac->txq;
+ struct sk_buff *skb;
struct ath_buf *bf;
struct list_head bf_head;
struct ath_tx_status ts;
memset(&ts, 0, sizeof(ts));
spin_lock_bh(&txq->axq_lock);
- while (!list_empty(&tid->buf_q)) {
- bf = list_first_entry(&tid->buf_q, struct ath_buf, list);
- list_move_tail(&bf->list, &bf_head);
+ while ((skb = __skb_dequeue(&tid->buf_q))) {
+ fi = get_frame_info(skb);
+ bf = fi->bf;
+
+ list_add_tail(&bf->list, &bf_head);
spin_unlock_bh(&txq->axq_lock);
- fi = get_frame_info(bf->bf_mpdu);
if (fi->retries) {
ath_tx_update_baw(sc, tid, fi->seqno);
ath_tx_complete_buf(sc, bf, txq, &bf_head, &ts, 0, 1);
struct ath_atx_tid *tid)
{
+ struct sk_buff *skb;
struct ath_buf *bf;
struct list_head bf_head;
struct ath_tx_status ts;
memset(&ts, 0, sizeof(ts));
INIT_LIST_HEAD(&bf_head);
- for (;;) {
- if (list_empty(&tid->buf_q))
- break;
+ while ((skb = __skb_dequeue(&tid->buf_q))) {
+ fi = get_frame_info(skb);
+ bf = fi->bf;
- bf = list_first_entry(&tid->buf_q, struct ath_buf, list);
- list_move_tail(&bf->list, &bf_head);
+ list_add_tail(&bf->list, &bf_head);
- fi = get_frame_info(bf->bf_mpdu);
if (fi->retries)
ath_tx_update_baw(sc, tid, fi->seqno);
struct ieee80211_tx_info *tx_info;
struct ath_atx_tid *tid = NULL;
struct ath_buf *bf_next, *bf_last = bf->bf_lastbf;
- struct list_head bf_head, bf_pending;
+ struct list_head bf_head;
+ struct sk_buff_head bf_pending;
u16 seq_st = 0, acked_cnt = 0, txfail_cnt = 0;
u32 ba[WME_BA_BMP_SIZE >> 5];
int isaggr, txfail, txpending, sendbar = 0, needreset = 0, nbad = 0;
}
}
- INIT_LIST_HEAD(&bf_pending);
- INIT_LIST_HEAD(&bf_head);
+ __skb_queue_head_init(&bf_pending);
ath_tx_count_frames(sc, bf, ts, txok, &nframes, &nbad);
while (bf) {
* Make sure the last desc is reclaimed if it is
* not a holding desc.
*/
- if (!bf_last->bf_stale || bf_next != NULL)
+ INIT_LIST_HEAD(&bf_head);
+ if ((sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA) ||
+ bf_next != NULL || !bf_last->bf_stale)
list_move_tail(&bf->list, &bf_head);
- else
- INIT_LIST_HEAD(&bf_head);
if (!txpending || (tid->state & AGGR_CLEANUP)) {
/*
ath9k_hw_cleartxdesc(sc->sc_ah,
tbf->bf_desc);
- list_add_tail(&tbf->list, &bf_head);
+ fi->bf = tbf;
} else {
/*
* Clear descriptor status words for
* Put this buffer on the temporary pending
* queue to retain ordering
*/
- list_splice_tail_init(&bf_head, &bf_pending);
+ __skb_queue_tail(&bf_pending, skb);
}
bf = bf_next;
}
/* prepend un-acked frames to the beginning of the pending frame queue */
- if (!list_empty(&bf_pending)) {
+ if (!skb_queue_empty(&bf_pending)) {
if (an->sleeping)
ieee80211_sta_set_tim(sta);
spin_lock_bh(&txq->axq_lock);
if (clear_filter)
tid->ac->clear_ps_filter = true;
- list_splice(&bf_pending, &tid->buf_q);
+ skb_queue_splice(&bf_pending, &tid->buf_q);
if (!an->sleeping)
ath_tx_queue_tid(txq, tid);
spin_unlock_bh(&txq->axq_lock);
int *aggr_len)
{
#define PADBYTES(_len) ((4 - ((_len) % 4)) % 4)
- struct ath_buf *bf, *bf_first, *bf_prev = NULL;
+ struct ath_buf *bf, *bf_first = NULL, *bf_prev = NULL;
int rl = 0, nframes = 0, ndelim, prev_al = 0;
u16 aggr_limit = 0, al = 0, bpad = 0,
al_delta, h_baw = tid->baw_size / 2;
enum ATH_AGGR_STATUS status = ATH_AGGR_DONE;
struct ieee80211_tx_info *tx_info;
struct ath_frame_info *fi;
-
- bf_first = list_first_entry(&tid->buf_q, struct ath_buf, list);
+ struct sk_buff *skb;
do {
- bf = list_first_entry(&tid->buf_q, struct ath_buf, list);
- fi = get_frame_info(bf->bf_mpdu);
+ skb = skb_peek(&tid->buf_q);
+ fi = get_frame_info(skb);
+ bf = fi->bf;
+
+ if (!bf_first)
+ bf_first = bf;
/* do not step over block-ack window */
if (!BAW_WITHIN(tid->seq_start, tid->baw_size, fi->seqno)) {
if (!fi->retries)
ath_tx_addto_baw(sc, tid, fi->seqno);
ath9k_hw_set11n_aggr_middle(sc->sc_ah, bf->bf_desc, ndelim);
- list_move_tail(&bf->list, bf_q);
+
+ __skb_unlink(skb, &tid->buf_q);
+ list_add_tail(&bf->list, bf_q);
if (bf_prev) {
bf_prev->bf_next = bf;
ath9k_hw_set_desc_link(sc->sc_ah, bf_prev->bf_desc,
}
bf_prev = bf;
- } while (!list_empty(&tid->buf_q));
+ } while (!skb_queue_empty(&tid->buf_q));
*aggr_len = al;
int aggr_len;
do {
- if (list_empty(&tid->buf_q))
+ if (skb_queue_empty(&tid->buf_q))
return;
INIT_LIST_HEAD(&bf_q);
spin_lock_bh(&txq->axq_lock);
- if (!list_empty(&tid->buf_q))
+ if (!skb_queue_empty(&tid->buf_q))
buffered = true;
tid->sched = false;
spin_lock_bh(&txq->axq_lock);
ac->clear_ps_filter = true;
- if (!list_empty(&tid->buf_q) && !tid->paused) {
+ if (!skb_queue_empty(&tid->buf_q) && !tid->paused) {
ath_tx_queue_tid(txq, tid);
ath_txq_schedule(sc, txq);
}
* add tid to round-robin queue if more frames
* are pending for the tid
*/
- if (!list_empty(&tid->buf_q))
+ if (!skb_queue_empty(&tid->buf_q))
ath_tx_queue_tid(txq, tid);
if (tid == last_tid ||
* - seqno is not within block-ack window
* - h/w queue depth exceeds low water mark
*/
- if (!list_empty(&tid->buf_q) || tid->paused ||
+ if (!skb_queue_empty(&tid->buf_q) || tid->paused ||
!BAW_WITHIN(tid->seq_start, tid->baw_size, fi->seqno) ||
txctl->txq->axq_ampdu_depth >= ATH_AGGR_MIN_QDEPTH) {
/*
* for aggregation.
*/
TX_STAT_INC(txctl->txq->axq_qnum, a_queued_sw);
- list_add_tail(&bf->list, &tid->buf_q);
+ __skb_queue_tail(&tid->buf_q, bf->bf_mpdu);
if (!txctl->an || !txctl->an->sleeping)
ath_tx_queue_tid(txctl->txq, tid);
return;
bf->bf_buf_addr,
txq->axq_qnum);
+ fi->bf = bf;
return bf;
}
tid->sched = false;
tid->paused = false;
tid->state &= ~AGGR_CLEANUP;
- INIT_LIST_HEAD(&tid->buf_q);
+ __skb_queue_head_init(&tid->buf_q);
acno = TID_TO_WME_AC(tidno);
tid->ac = &an->ac[acno];
tid->state &= ~AGGR_ADDBA_COMPLETE;
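
For readers unfamiliar with the sk_buff queue API this patch switches to, the
sketch below (not part of the patch; the helper and queue names are
illustrative only) shows the primitives used above: __skb_queue_head_init(),
__skb_dequeue(), __skb_queue_tail() and skb_queue_splice(). The last of these
joins one queue onto the head of another, which is what lets the un-acked
frames keep their original ordering when they are put back on tid->buf_q.

#include <linux/skbuff.h>

/* Drain @buf_q, park frames that still need to be sent on @pending, then
 * splice @pending back onto the head of @buf_q so ordering is preserved.
 * The caller is assumed to hold whatever lock protects the queues (the
 * driver uses txq->axq_lock); the __skb_* variants take no lock themselves.
 */
static void requeue_unacked(struct sk_buff_head *buf_q,
			    struct sk_buff_head *pending)
{
	struct sk_buff *skb;

	__skb_queue_head_init(pending);

	while ((skb = __skb_dequeue(buf_q))) {
		/* ... try to complete the frame; on failure keep it ... */
		__skb_queue_tail(pending, skb);
	}

	/* skb_queue_splice() prepends @pending to @buf_q */
	skb_queue_splice(pending, buf_q);
}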