2 * Copyright (c) 2013 Qualcomm Atheros, Inc.
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted (subject to the limitations in the
7 * disclaimer below) provided that the following conditions are met:
9 * * Redistributions of source code must retain the above copyright
10 * notice, this list of conditions and the following disclaimer.
12 * * Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in the
14 * documentation and/or other materials provided with the
17 * * Neither the name of Qualcomm Atheros nor the names of its
18 * contributors may be used to endorse or promote products derived
19 * from this software without specific prior written permission.
21 * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE
22 * GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT
23 * HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
24 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
25 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
26 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
27 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
28 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
29 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
30 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
31 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
32 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
33 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
36 #include <adf_os_types.h>
37 #include <adf_os_dma.h>
38 #include <adf_os_timer.h>
39 #include <adf_os_lock.h>
40 #include <adf_os_io.h>
41 #include <adf_os_mem.h>
42 #include <adf_os_util.h>
43 #include <adf_os_stdtypes.h>
44 #include <adf_os_defer.h>
45 #include <adf_os_atomic.h>
48 #include <adf_net_wcmd.h>
52 #ifdef USE_HEADERLEN_RESV
56 #include <ieee80211_var.h>
57 #include "if_athrate.h"
58 #include "if_athvar.h"
61 #define ath_tgt_free_skb adf_nbuf_free
63 #define OFDM_PLCP_BITS 22
64 #define HT_RC_2_MCS(_rc) ((_rc) & 0x0f)
65 #define HT_RC_2_STREAMS(_rc) ((((_rc) & 0x78) >> 3) + 1)
71 #define HT_LTF(_ns) (4 * (_ns))
72 #define SYMBOL_TIME(_ns) ((_ns) << 2) // ns * 4 us
73 #define SYMBOL_TIME_HALFGI(_ns) (((_ns) * 18 + 4) / 5) // ns * 3.6 us
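/*
 * Bits carried per OFDM symbol for HT MCS 0-15, indexed by
 * HT_RC_2_MCS(rate code); column 0 is 20 MHz, column 1 is 40 MHz.
 */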
75 static a_uint16_t bits_per_symbol[][2] = {
77 { 26, 54 }, // 0: BPSK
78 { 52, 108 }, // 1: QPSK 1/2
79 { 78, 162 }, // 2: QPSK 3/4
80 { 104, 216 }, // 3: 16-QAM 1/2
81 { 156, 324 }, // 4: 16-QAM 3/4
82 { 208, 432 }, // 5: 64-QAM 2/3
83 { 234, 486 }, // 6: 64-QAM 3/4
84 { 260, 540 }, // 7: 64-QAM 5/6
85 { 52, 108 }, // 8: BPSK
86 { 104, 216 }, // 9: QPSK 1/2
87 { 156, 324 }, // 10: QPSK 3/4
88 { 208, 432 }, // 11: 16-QAM 1/2
89 { 312, 648 }, // 12: 16-QAM 3/4
90 { 416, 864 }, // 13: 64-QAM 2/3
91 { 468, 972 }, // 14: 64-QAM 3/4
92 { 520, 1080 }, // 15: 64-QAM 5/6
95 void owltgt_tx_processq(struct ath_softc_tgt *sc, struct ath_txq *txq,
96 owl_txq_state_t txqstate);
97 static void ath_tgt_txqaddbuf(struct ath_softc_tgt *sc, struct ath_txq *txq,
98 struct ath_tx_buf *bf, struct ath_tx_desc *lastds);
99 void ath_rate_findrate_11n_Hardcoded(struct ath_softc_tgt *sc,
100 struct ath_rc_series series[]);
101 void ath_buf_set_rate_Hardcoded(struct ath_softc_tgt *sc,
102 struct ath_tx_buf *bf);
103 static a_int32_t ath_tgt_txbuf_setup(struct ath_softc_tgt *sc,
104 struct ath_tx_buf *bf, ath_data_hdr_t *dh);
105 static void ath_tx_freebuf(struct ath_softc_tgt *sc, struct ath_tx_buf *bf);
106 static void ath_tx_uc_comp(struct ath_softc_tgt *sc, struct ath_tx_buf *bf);
107 static void ath_update_stats(struct ath_softc_tgt *sc, struct ath_tx_buf *bf);
108 void adf_print_buf(adf_nbuf_t buf);
109 static void ath_tgt_tx_enqueue(struct ath_txq *txq, struct ath_atx_tid *tid);
111 void ath_tgt_tx_comp_aggr(struct ath_softc_tgt *sc, struct ath_tx_buf *bf);
112 struct ieee80211_frame *ATH_SKB_2_WH(adf_nbuf_t skb);
114 void ath_tgt_tx_send_normal(struct ath_softc_tgt *sc, struct ath_tx_buf *bf);
116 static void ath_tgt_tx_sched_normal(struct ath_softc_tgt *sc, ath_atx_tid_t *tid);
117 static void ath_tgt_tx_sched_aggr(struct ath_softc_tgt *sc, ath_atx_tid_t *tid);
119 extern a_int32_t ath_chainmask_sel_logic(void *);
120 static a_int32_t ath_get_pktlen(struct ath_tx_buf *bf, a_int32_t hdrlen);
121 static void ath_tgt_txq_schedule(struct ath_softc_tgt *sc, struct ath_txq *txq);
123 typedef void (*ath_ft_set_atype_t)(struct ath_softc_tgt *sc, struct ath_buf *bf);
126 ath_tx_set_retry(struct ath_softc_tgt *sc, struct ath_tx_buf *bf);
129 ath_bar_tx(struct ath_softc_tgt *sc, ath_atx_tid_t *tid, struct ath_tx_buf *bf);
131 ath_tx_update_baw(ath_atx_tid_t *tid, int seqno);
133 ath_tx_retry_subframe(struct ath_softc_tgt *sc, struct ath_tx_buf *bf,
134 ath_tx_bufhead *bf_q, struct ath_tx_buf **bar);
137 ath_tx_comp_aggr_error(struct ath_softc_tgt *sc, struct ath_tx_buf *bf, ath_atx_tid_t *tid);
139 void ath_tx_addto_baw(ath_atx_tid_t *tid, struct ath_tx_buf *bf);
140 static inline void ath_tx_retry_unaggr(struct ath_softc_tgt *sc, struct ath_tx_buf *bf);
141 static void ath_tx_comp_unaggr(struct ath_softc_tgt *sc, struct ath_tx_buf *bf);
142 static void ath_update_aggr_stats(struct ath_softc_tgt *sc, struct ath_tx_desc *ds,
143 int nframes, int nbad);
144 static inline void ath_aggr_resume_tid(struct ath_softc_tgt *sc, ath_atx_tid_t *tid);
145 static void ath_tx_comp_cleanup(struct ath_softc_tgt *sc, struct ath_tx_buf *bf);
147 int ath_tgt_tx_add_to_aggr(struct ath_softc_tgt *sc,
148 struct ath_buf *bf, int datatype,
149 ath_atx_tid_t *tid, int is_burst);
151 struct ieee80211_frame *ATH_SKB_2_WH(adf_nbuf_t skb)
156 adf_nbuf_peek_header(skb, &anbdata, &anblen);
158 return((struct ieee80211_frame *)anbdata);
161 #undef adf_os_cpu_to_le16
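/*
 * Local replacement that unconditionally swaps the two bytes; this is a
 * correct cpu_to_le16 only if the target CPU is big-endian.
 */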
163 static a_uint16_t adf_os_cpu_to_le16(a_uint16_t x)
165 return ((((x) & 0xff00) >> 8) | (((x) & 0x00ff) << 8));
169 ath_aggr_resume_tid(struct ath_softc_tgt *sc, ath_atx_tid_t *tid)
173 txq = TID_TO_ACTXQ(tid->tidno);
176 if (asf_tailq_empty(&tid->buf_q))
179 ath_tgt_tx_enqueue(txq, tid);
180 ath_tgt_txq_schedule(sc, txq);
184 ath_aggr_pause_tid(struct ath_softc_tgt *sc, ath_atx_tid_t *tid)
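/*
 * Estimate the on-air duration of the frame (or aggregate). Legacy rates
 * are handed to ath_hal_computetxtime(); HT rates are computed from the
 * OFDM symbol count, adding the fixed HT preamble fields plus one HT-LTF
 * per spatial stream.
 */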
189 static a_uint32_t ath_pkt_duration(struct ath_softc_tgt *sc,
190 a_uint8_t rix, struct ath_tx_buf *bf,
191 a_int32_t width, a_int32_t half_gi)
193 const HAL_RATE_TABLE *rt = sc->sc_currates;
194 a_uint32_t nbits, nsymbits, duration, nsymbols;
199 pktlen = bf->bf_isaggr ? bf->bf_al : bf->bf_pktlen;
200 rc = rt->info[rix].rateCode;
203 return ath_hal_computetxtime(sc->sc_ah, rt, pktlen, rix,
206 nbits = (pktlen << 3) + OFDM_PLCP_BITS;
207 nsymbits = bits_per_symbol[HT_RC_2_MCS(rc)][width];
208 nsymbols = (nbits + nsymbits - 1) / nsymbits;
211 duration = SYMBOL_TIME(nsymbols);
213 duration = SYMBOL_TIME_HALFGI(nsymbols);
215 streams = HT_RC_2_STREAMS(rc);
216 duration += L_STF + L_LTF + L_SIG + HT_SIG + HT_STF + HT_LTF(streams);
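/* DMA map/unmap helpers for the first skb on the buffer's skb queue. */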
221 static void ath_dma_map(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
223 adf_nbuf_t skb = bf->bf_skb;
225 skb = adf_nbuf_queue_first(&bf->bf_skbhead);
226 adf_nbuf_map(sc->sc_dev, bf->bf_dmamap, skb, ADF_OS_DMA_TO_DEVICE);
229 static void ath_dma_unmap(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
231 adf_nbuf_t skb = bf->bf_skb;
233 skb = adf_nbuf_queue_first(&bf->bf_skbhead);
234 adf_nbuf_unmap(sc->sc_dev, bf->bf_dmamap, ADF_OS_DMA_TO_DEVICE);
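/*
 * Point each descriptor at its DMA segment, chain the descriptors through
 * ds_link and let the HAL fill in the per-segment length and first/last
 * flags.
 */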
237 static void ath_filltxdesc(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
239 struct ath_tx_desc *ds0, *ds = bf->bf_desc;
243 adf_nbuf_dmamap_info(bf->bf_dmamap, &bf->bf_dmamap_info);
245 for (i = 0; i < bf->bf_dmamap_info.nsegs; i++, ds++) {
247 ds->ds_data = bf->bf_dmamap_info.dma_segs[i].paddr;
249 if (i == (bf->bf_dmamap_info.nsegs - 1)) {
253 ds->ds_link = ATH_BUF_GET_DESC_PHY_ADDR_WITH_IDX(bf, i+1);
255 ath_hal_filltxdesc(sc->sc_ah, ds
256 , bf->bf_dmamap_info.dma_segs[i].len
258 , i == (bf->bf_dmamap_info.nsegs - 1)
263 static void ath_tx_tgt_setds(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
265 struct ath_tx_desc *ds = bf->bf_desc;
266 struct ath_hal *ah = sc->sc_ah;
268 switch (bf->bf_protmode) {
269 case IEEE80211_PROT_RTSCTS:
270 bf->bf_flags |= HAL_TXDESC_RTSENA;
272 case IEEE80211_PROT_CTSONLY:
273 bf->bf_flags |= HAL_TXDESC_CTSENA;
279 ah->ah_set11nTxDesc(ah, ds
285 , bf->bf_flags | HAL_TXDESC_INTREQ);
287 ath_filltxdesc(sc, bf);
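/*
 * Exchange bf with the spare buffer kept in sc_txbuf_held: the skb, DMA
 * mapping and per-frame state move onto the spare, which is remapped and
 * given a fresh descriptor chain, while bf becomes the new held spare.
 * Used by the retry and completion paths below.
 */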
290 static struct ath_tx_buf *ath_buf_toggle(struct ath_softc_tgt *sc,
291 struct ath_tx_buf *bf,
294 struct ath_tx_buf *tmp = NULL;
295 adf_nbuf_t buf = NULL;
297 adf_os_assert(sc->sc_txbuf_held != NULL);
299 tmp = sc->sc_txbuf_held;
302 ath_dma_unmap(sc, bf);
303 adf_nbuf_queue_init(&tmp->bf_skbhead);
304 buf = adf_nbuf_queue_remove(&bf->bf_skbhead);
306 adf_nbuf_queue_add(&tmp->bf_skbhead, buf);
308 adf_os_assert(adf_nbuf_queue_len(&bf->bf_skbhead) == 0);
310 tmp->bf_next = bf->bf_next;
311 tmp->bf_endpt = bf->bf_endpt;
312 tmp->bf_tidno = bf->bf_tidno;
313 tmp->bf_skb = bf->bf_skb;
314 tmp->bf_node = bf->bf_node;
315 tmp->bf_isaggr = bf->bf_isaggr;
316 tmp->bf_flags = bf->bf_flags;
317 tmp->bf_state = bf->bf_state;
318 tmp->bf_retries = bf->bf_retries;
319 tmp->bf_comp = bf->bf_comp;
320 tmp->bf_nframes = bf->bf_nframes;
321 tmp->bf_cookie = bf->bf_cookie;
333 ath_dma_map(sc, tmp);
334 ath_tx_tgt_setds(sc, tmp);
337 sc->sc_txbuf_held = bf;
342 static void ath_tgt_skb_free(struct ath_softc_tgt *sc,
343 adf_nbuf_queue_t *head,
344 HTC_ENDPOINT_ID endpt)
348 while (adf_nbuf_queue_len(head) != 0) {
349 tskb = adf_nbuf_queue_remove(head);
350 ath_free_tx_skb(sc->tgt_htc_handle, endpt, tskb);
354 static void ath_buf_comp(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
356 ath_dma_unmap(sc, bf);
357 ath_tgt_skb_free(sc, &bf->bf_skbhead, bf->bf_endpt);
360 bf = ath_buf_toggle(sc, bf, 0);
362 asf_tailq_insert_tail(&sc->sc_txbuf, bf, bf_list);
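/*
 * Build the four-entry HAL_11N_RATE_SERIES array from bf_rcs and program
 * the rate scenario: RTS/CTS protection, 20/40 MHz, half-GI (and STBC)
 * flags, per-series packet duration and the transmit chainmask.
 */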
366 static void ath_buf_set_rate(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
368 struct ath_hal *ah = sc->sc_ah;
369 const HAL_RATE_TABLE *rt;
370 struct ath_tx_desc *ds = bf->bf_desc;
371 HAL_11N_RATE_SERIES series[4];
373 a_uint8_t rix, cix, rtsctsrate;
374 a_uint32_t ctsduration = 0;
375 a_int32_t prot_mode = AH_FALSE;
377 rt = sc->sc_currates;
378 rix = bf->bf_rcs[0].rix;
379 flags = (bf->bf_flags & (HAL_TXDESC_RTSENA | HAL_TXDESC_CTSENA));
380 cix = rt->info[sc->sc_protrix].controlRate;
382 if (bf->bf_protmode != IEEE80211_PROT_NONE &&
383 (rt->info[rix].phy == IEEE80211_T_OFDM ||
384 rt->info[rix].phy == IEEE80211_T_HT) &&
385 (bf->bf_flags & HAL_TXDESC_NOACK) == 0) {
386 cix = rt->info[sc->sc_protrix].controlRate;
389 if (ath_hal_htsupported(ah) && (!bf->bf_ismcast))
390 flags = HAL_TXDESC_RTSENA;
393 if (bf->bf_rcs[i].tries) {
394 cix = rt->info[bf->bf_rcs[i].rix].controlRate;
401 adf_os_mem_set(series, 0, sizeof(HAL_11N_RATE_SERIES) * 4);
403 for (i = 0; i < 4; i++) {
404 if (!bf->bf_rcs[i].tries)
407 rix = bf->bf_rcs[i].rix;
409 series[i].Rate = rt->info[rix].rateCode |
410 (bf->bf_shpream ? rt->info[rix].shortPreamble : 0);
412 series[i].Tries = bf->bf_rcs[i].tries;
414 series[i].RateFlags = ((bf->bf_rcs[i].flags & ATH_RC_RTSCTS_FLAG) ?
415 HAL_RATESERIES_RTS_CTS : 0 ) |
416 ((bf->bf_rcs[i].flags & ATH_RC_CW40_FLAG) ?
417 HAL_RATESERIES_2040 : 0 ) |
418 ((bf->bf_rcs[i].flags & ATH_RC_HT40_SGI_FLAG) ?
419 HAL_RATESERIES_HALFGI : 0 ) |
420 ((bf->bf_rcs[i].flags & ATH_RC_TX_STBC_FLAG) ?
421 HAL_RATESERIES_STBC: 0);
423 series[i].RateFlags = ((bf->bf_rcs[i].flags & ATH_RC_RTSCTS_FLAG) ?
424 HAL_RATESERIES_RTS_CTS : 0 ) |
425 ((bf->bf_rcs[i].flags & ATH_RC_CW40_FLAG) ?
426 HAL_RATESERIES_2040 : 0 ) |
427 ((bf->bf_rcs[i].flags & ATH_RC_HT40_SGI_FLAG) ?
428 HAL_RATESERIES_HALFGI : 0 );
430 series[i].PktDuration = ath_pkt_duration(sc, rix, bf,
431 (bf->bf_rcs[i].flags & ATH_RC_CW40_FLAG) != 0,
432 (bf->bf_rcs[i].flags & ATH_RC_HT40_SGI_FLAG));
434 series[i].ChSel = sc->sc_ic.ic_tx_chainmask;
437 series[i].RateFlags |= HAL_RATESERIES_RTS_CTS;
439 if (bf->bf_rcs[i].flags & ATH_RC_DS_FLAG)
440 series[i].RateFlags |= HAL_RATESERIES_RTS_CTS;
443 rtsctsrate = rt->info[cix].rateCode |
444 (bf->bf_shpream ? rt->info[cix].shortPreamble : 0);
446 ah->ah_set11nRateScenario(ah, ds, 1,
447 rtsctsrate, ctsduration,
452 static void ath_tgt_rate_findrate(struct ath_softc_tgt *sc,
453 struct ath_node_target *an,
454 a_int32_t shortPreamble,
460 struct ath_rc_series series[],
463 ath_rate_findrate(sc, an, 1, frameLen, 10, 4, 1,
464 ATH_RC_PROBE_ALLOWED, series, isProbe);
467 static void owl_tgt_tid_init(struct ath_atx_tid *tid)
471 tid->seq_start = tid->seq_next = 0;
472 tid->baw_size = WME_MAX_BA;
473 tid->baw_head = tid->baw_tail = 0;
476 tid->sched = AH_FALSE;
478 asf_tailq_init(&tid->buf_q);
480 for (i = 0; i < ATH_TID_MAX_BUFS; i++) {
481 TX_BUF_BITMAP_CLR(tid->tx_buf_bitmap, i);
485 static void owl_tgt_tid_cleanup(struct ath_softc_tgt *sc,
486 struct ath_atx_tid *tid)
493 tid->flag &= ~TID_CLEANUP_INPROGRES;
495 if (tid->flag & TID_REINITIALIZE) {
496 adf_os_print("TID REINIT DONE for tid %p\n", tid);
497 tid->flag &= ~TID_REINITIALIZE;
498 owl_tgt_tid_init(tid);
500 ath_aggr_resume_tid(sc, tid);
504 void owl_tgt_node_init(struct ath_node_target * an)
506 struct ath_atx_tid *tid;
509 for (tidno = 0, tid = &an->tid[tidno]; tidno < WME_NUM_TID; tidno++, tid++) {
513 if ( tid->flag & TID_CLEANUP_INPROGRES ) {
514 tid->flag |= TID_REINITIALIZE;
515 adf_os_print("tid[%p]->incomp is not 0: %d\n",
518 owl_tgt_tid_init(tid);
523 void ath_tx_status_clear(struct ath_softc_tgt *sc)
527 for (i = 0; i < 2; i++) {
528 sc->tx_status[i].cnt = 0;
532 static WMI_TXSTATUS_EVENT *ath_tx_status_get(struct ath_softc_tgt *sc)
534 WMI_TXSTATUS_EVENT *txs = NULL;
537 for (i = 0; i < 2; i++) {
538 if (sc->tx_status[i].cnt < HTC_MAX_TX_STATUS) {
539 txs = &sc->tx_status[i];
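/*
 * Append one per-frame entry to the pending WMI_TXSTATUS_EVENT batch:
 * host cookie, endpoint id, filtered/ACK flags and the rate that was
 * actually used.
 */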
547 void ath_tx_status_update(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
549 struct ath_tx_desc *ds = bf->bf_lastds;
550 WMI_TXSTATUS_EVENT *txs;
552 if (sc->sc_tx_draining)
555 txs = ath_tx_status_get(sc);
559 txs->txstatus[txs->cnt].cookie = bf->bf_cookie;
560 txs->txstatus[txs->cnt].ts_rate = SM(bf->bf_endpt, ATH9K_HTC_TXSTAT_EPID);
562 if (ds->ds_txstat.ts_status & HAL_TXERR_FILT)
563 txs->txstatus[txs->cnt].ts_flags |= ATH9K_HTC_TXSTAT_FILT;
565 if (!(ds->ds_txstat.ts_status & HAL_TXERR_XRETRY) &&
566 !(ds->ds_txstat.ts_status & HAL_TXERR_FIFO) &&
567 !(ds->ds_txstat.ts_status & HAL_TXERR_TIMER_EXPIRED) &&
568 !(ds->ds_txstat.ts_status & HAL_TXERR_FILT))
569 txs->txstatus[txs->cnt].ts_flags |= ATH9K_HTC_TXSTAT_ACK;
571 ath_tx_status_update_rate(sc, bf->bf_rcs, ds->ds_txstat.ts_rate, txs);
576 void ath_tx_status_update_aggr(struct ath_softc_tgt *sc, struct ath_tx_buf *bf,
577 struct ath_tx_desc *ds, struct ath_rc_series rcs[],
580 WMI_TXSTATUS_EVENT *txs;
582 if (sc->sc_tx_draining)
585 txs = ath_tx_status_get(sc);
589 txs->txstatus[txs->cnt].cookie = bf->bf_cookie;
590 txs->txstatus[txs->cnt].ts_rate = SM(bf->bf_endpt, ATH9K_HTC_TXSTAT_EPID);
593 txs->txstatus[txs->cnt].ts_flags |= ATH9K_HTC_TXSTAT_ACK;
596 ath_tx_status_update_rate(sc, rcs, ds->ds_txstat.ts_rate, txs);
601 void ath_tx_status_send(struct ath_softc_tgt *sc)
605 if (sc->sc_tx_draining)
608 for (i = 0; i < 2; i++) {
609 if (sc->tx_status[i].cnt) {
610 wmi_event(sc->tgt_wmi_handle, WMI_TXSTATUS_EVENTID,
611 &sc->tx_status[i], sizeof(WMI_TXSTATUS_EVENT));
612 /* FIXME: Handle failures. */
613 sc->tx_status[i].cnt = 0;
618 static void owltgt_tx_process_cabq(struct ath_softc_tgt *sc, struct ath_txq *txq)
620 struct ath_hal *ah = sc->sc_ah;
621 ah->ah_setInterrupts(ah, sc->sc_imask & ~HAL_INT_SWBA);
622 owltgt_tx_processq(sc, txq, OWL_TXQ_ACTIVE);
623 ah->ah_setInterrupts(ah, sc->sc_imask);
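/*
 * TX completion tasklet: reset the status batch, reap every configured
 * data queue (the CAB queue with SWBA interrupts temporarily masked) and
 * then push the accumulated status events back to the host.
 */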
626 void owl_tgt_tx_tasklet(TQUEUE_ARG data)
628 struct ath_softc_tgt *sc = (struct ath_softc_tgt *)data;
632 ath_tx_status_clear(sc);
634 for (i = 0; i < (HAL_NUM_TX_QUEUES - 6); i++) {
635 txq = ATH_TXQ(sc, i);
637 if (ATH_TXQ_SETUP(sc, i)) {
638 if (txq == sc->sc_cabq)
639 owltgt_tx_process_cabq(sc, txq);
641 owltgt_tx_processq(sc, txq, OWL_TXQ_ACTIVE);
645 ath_tx_status_send(sc);
648 void owltgt_tx_processq(struct ath_softc_tgt *sc, struct ath_txq *txq,
649 owl_txq_state_t txqstate)
651 struct ath_tx_buf *bf;
652 struct ath_tx_desc *ds;
656 if (asf_tailq_empty(&txq->axq_q)) {
657 txq->axq_link = NULL;
658 txq->axq_linkbuf = NULL;
662 bf = asf_tailq_first(&txq->axq_q);
665 status = ath_hal_txprocdesc(sc->sc_ah, ds);
667 if (status == HAL_EINPROGRESS) {
668 if (txqstate == OWL_TXQ_ACTIVE)
670 else if (txqstate == OWL_TXQ_STOPPED) {
671 __stats(sc, tx_stopfiltered);
672 ds->ds_txstat.ts_flags = 0;
673 ds->ds_txstat.ts_status = HAL_OK;
675 ds->ds_txstat.ts_flags = HAL_TX_SW_FILTERED;
679 ATH_TXQ_REMOVE_HEAD(txq, bf, bf_list);
680 if ((asf_tailq_empty(&txq->axq_q))) {
681 __stats(sc, tx_qnull);
682 txq->axq_link = NULL;
683 txq->axq_linkbuf = NULL;
689 ath_tx_status_update(sc, bf);
690 ath_buf_comp(sc, bf);
693 if (txqstate == OWL_TXQ_ACTIVE) {
694 ath_tgt_txq_schedule(sc, txq);
699 static struct ieee80211_frame* ATH_SKB2_WH(adf_nbuf_t skb)
704 adf_nbuf_peek_header(skb, &anbdata, &anblen);
705 return((struct ieee80211_frame *)anbdata);
709 ath_tgt_tid_drain(struct ath_softc_tgt *sc, struct ath_atx_tid *tid)
711 struct ath_tx_buf *bf;
713 while (!asf_tailq_empty(&tid->buf_q)) {
714 TAILQ_DEQ(&tid->buf_q, bf, bf_list);
715 ath_tx_freebuf(sc, bf);
718 tid->seq_next = tid->seq_start;
719 tid->baw_tail = tid->baw_head;
722 static void ath_tgt_tx_comp_normal(struct ath_softc_tgt *sc,
723 struct ath_tx_buf *bf)
725 struct ath_node_target *an = ATH_NODE_TARGET(bf->bf_node);
726 ath_atx_tid_t *tid = ATH_AN_2_TID(an, bf->bf_tidno);
728 if (tid->flag & TID_CLEANUP_INPROGRES) {
729 owl_tgt_tid_cleanup(sc, tid);
733 ath_tx_uc_comp(sc, bf);
736 ath_tx_freebuf(sc, bf);
739 static struct ieee80211_node_target * ath_tgt_find_node(struct ath_softc_tgt *sc,
740 a_int32_t node_index)
742 struct ath_node_target *an;
743 struct ieee80211_node_target *ni;
745 if (node_index > TARGET_NODE_MAX)
748 an = &sc->sc_sta[node_index];
752 if (ni->ni_vap == NULL) {
761 static struct ath_tx_buf* ath_tx_buf_alloc(struct ath_softc_tgt *sc)
763 struct ath_tx_buf *bf = NULL;
765 bf = asf_tailq_first(&sc->sc_txbuf);
767 adf_os_mem_set(&bf->bf_state, 0, sizeof(struct ath_buf_state));
768 asf_tailq_remove(&sc->sc_txbuf, bf, bf_list);
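/*
 * Turn a data frame handed down by the host into a DMA-ready ath_tx_buf:
 * look up the node and TID from the ath_data_hdr, allocate a buffer,
 * fill in key/queue/protection state and set up the descriptors.
 */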
776 struct ath_tx_buf* ath_tgt_tx_prepare(struct ath_softc_tgt *sc,
777 adf_nbuf_t skb, ath_data_hdr_t *dh)
779 struct ath_tx_buf *bf;
780 struct ieee80211_node_target *ni;
781 struct ath_atx_tid *tid;
783 ni = ath_tgt_find_node(sc, dh->ni_index);
787 tid = ATH_AN_2_TID(ATH_NODE_TARGET(ni), dh->tidno);
788 if (tid->flag & TID_REINITIALIZE) {
789 adf_os_print("drop frame due to TID reinit\n");
793 bf = ath_tx_buf_alloc(sc);
795 __stats(sc, tx_nobufs);
799 bf->bf_tidno = dh->tidno;
800 bf->bf_txq = TID_TO_ACTXQ(bf->bf_tidno);
801 bf->bf_keytype = dh->keytype;
802 bf->bf_keyix = dh->keyix;
803 bf->bf_protmode = dh->flags & (IEEE80211_PROT_RTSCTS | IEEE80211_PROT_CTSONLY);
806 adf_nbuf_queue_add(&bf->bf_skbhead, skb);
807 skb = adf_nbuf_queue_first(&(bf->bf_skbhead));
809 if (adf_nbuf_queue_len(&(bf->bf_skbhead)) == 0) {
810 __stats(sc, tx_noskbs);
818 ath_tgt_txbuf_setup(sc, bf, dh);
820 ath_tx_tgt_setds(sc, bf);
825 static void ath_tgt_tx_seqno_normal(struct ath_tx_buf *bf)
827 struct ieee80211_node_target *ni = bf->bf_node;
828 struct ath_node_target *an = ATH_NODE_TARGET(ni);
829 struct ieee80211_frame *wh = ATH_SKB_2_WH(bf->bf_skb);
830 struct ath_atx_tid *tid = ATH_AN_2_TID(an, bf->bf_tidno);
832 u_int8_t fragno = (wh->i_seq[0] & 0xf);
834 INCR(ni->ni_txseqmgmt, IEEE80211_SEQ_MAX);
836 bf->bf_seqno = (tid->seq_next << IEEE80211_SEQ_SEQ_SHIFT);
838 *(u_int16_t *)wh->i_seq = adf_os_cpu_to_le16(bf->bf_seqno);
839 wh->i_seq[0] |= fragno;
841 if (!(wh->i_fc[1] & IEEE80211_FC1_MORE_FRAG))
842 INCR(tid->seq_next, IEEE80211_SEQ_MAX);
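/*
 * Adjust bf_pktlen for the ICV/MIC overhead of the configured cipher and,
 * for AES/TKIP, perform the target-side crypto encapsulation.
 */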
845 static a_int32_t ath_key_setup(struct ieee80211_node_target *ni,
846 struct ath_tx_buf *bf)
848 struct ieee80211_frame *wh = ATH_SKB_2_WH(bf->bf_skb);
850 if (!(wh->i_fc[1] & IEEE80211_FC1_WEP)) {
851 bf->bf_keytype = HAL_KEY_TYPE_CLEAR;
852 bf->bf_keyix = HAL_TXKEYIX_INVALID;
856 switch (bf->bf_keytype) {
857 case HAL_KEY_TYPE_WEP:
858 bf->bf_pktlen += IEEE80211_WEP_ICVLEN;
860 case HAL_KEY_TYPE_AES:
861 bf->bf_pktlen += IEEE80211_WEP_MICLEN;
863 case HAL_KEY_TYPE_TKIP:
864 bf->bf_pktlen += IEEE80211_WEP_ICVLEN;
870 if (bf->bf_keytype == HAL_KEY_TYPE_AES ||
871 bf->bf_keytype == HAL_KEY_TYPE_TKIP)
872 ieee80211_tgt_crypto_encap(wh, ni, bf->bf_keytype);
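/*
 * Queue a unicast frame to the hardware: link it behind axq_link or, for
 * an idle queue, program TXDP directly. The read of register 0x840
 * (apparently the per-queue TXE bits) re-arms TXDP if the queue stopped
 * while the new frame was being chained.
 */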
877 static void ath_tgt_txq_add_ucast(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
879 struct ath_hal *ah = sc->sc_ah;
882 volatile a_int32_t txe_val;
888 status = ath_hal_txprocdesc(sc->sc_ah, bf->bf_lastds);
890 ATH_TXQ_INSERT_TAIL(txq, bf, bf_list);
892 if (txq->axq_link == NULL) {
893 ah->ah_setTxDP(ah, txq->axq_qnum, ATH_BUF_GET_DESC_PHY_ADDR(bf));
895 *txq->axq_link = ATH_BUF_GET_DESC_PHY_ADDR(bf);
897 txe_val = OS_REG_READ(ah, 0x840);
898 if (!(txe_val & (1 << txq->axq_qnum)))
899 ah->ah_setTxDP(ah, txq->axq_qnum, ATH_BUF_GET_DESC_PHY_ADDR(bf));
902 txq->axq_link = &bf->bf_lastds->ds_link;
903 ah->ah_startTxDma(ah, txq->axq_qnum);
906 static a_int32_t ath_tgt_txbuf_setup(struct ath_softc_tgt *sc,
907 struct ath_tx_buf *bf,
911 struct ieee80211_frame *wh = ATH_SKB2_WH(bf->bf_skb);
913 a_uint32_t flags = adf_os_ntohl(dh->flags);
915 ath_tgt_tx_seqno_normal(bf);
917 bf->bf_txq_add = ath_tgt_txq_add_ucast;
918 bf->bf_hdrlen = ieee80211_anyhdrsize(wh);
919 bf->bf_pktlen = ath_get_pktlen(bf, bf->bf_hdrlen);
920 bf->bf_ismcast = IEEE80211_IS_MULTICAST(wh->i_addr1);
922 if ((retval = ath_key_setup(bf->bf_node, bf)) < 0)
925 if (flags & ATH_SHORT_PREAMBLE)
926 bf->bf_shpream = AH_TRUE;
928 bf->bf_shpream = AH_FALSE;
930 bf->bf_flags = HAL_TXDESC_CLRDMASK;
931 bf->bf_atype = HAL_PKT_TYPE_NORMAL;
937 ath_get_pktlen(struct ath_tx_buf *bf, a_int32_t hdrlen)
939 adf_nbuf_t skb = bf->bf_skb;
942 skb = adf_nbuf_queue_first(&bf->bf_skbhead);
943 pktlen = adf_nbuf_len(skb);
945 pktlen -= (hdrlen & 3);
946 pktlen += IEEE80211_CRC_LEN;
952 ath_tgt_tx_send_normal(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
954 struct ath_node_target *an = ATH_NODE_TARGET(bf->bf_node);
955 struct ath_rc_series rcs[4];
956 struct ath_rc_series mrcs[4];
957 a_int32_t shortPreamble = 0;
958 a_int32_t isProbe = 0;
960 adf_os_mem_set(rcs, 0, sizeof(struct ath_rc_series) * 4);
961 adf_os_mem_set(mrcs, 0, sizeof(struct ath_rc_series) * 4);
963 if (!bf->bf_ismcast) {
964 ath_tgt_rate_findrate(sc, an, shortPreamble,
967 ath_hal_memcpy(bf->bf_rcs, rcs, sizeof(rcs));
969 mrcs[1].tries = mrcs[2].tries = mrcs[3].tries = 0;
970 mrcs[1].rix = mrcs[2].rix = mrcs[3].rix = 0;
974 ath_hal_memcpy(bf->bf_rcs, mrcs, sizeof(mrcs));
977 ath_buf_set_rate(sc, bf);
978 bf->bf_txq_add(sc, bf);
982 ath_tx_freebuf(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
985 struct ath_tx_desc *bfd = NULL;
986 struct ath_hal *ah = sc->sc_ah;
988 for (bfd = bf->bf_desc, i = 0; i < bf->bf_dmamap_info.nsegs; bfd++, i++) {
989 ah->ah_clr11nAggr(ah, bfd);
990 ah->ah_set11nBurstDuration(ah, bfd, 0);
991 ath_hal_set11n_virtualmorefrag(sc->sc_ah, bfd, 0);
994 ath_dma_unmap(sc, bf);
996 ath_tgt_skb_free(sc, &bf->bf_skbhead, bf->bf_endpt);
1002 bf = ath_buf_toggle(sc, bf, 0);
1004 bf->bf_isretried = 0;
1007 asf_tailq_insert_tail(&sc->sc_txbuf, bf, bf_list);
1011 ath_tx_uc_comp(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
1013 ath_tx_status_update(sc, bf);
1014 ath_update_stats(sc, bf);
1015 ath_rate_tx_complete(sc, ATH_NODE_TARGET(bf->bf_node),
1016 bf->bf_lastds, bf->bf_rcs, 1, 0);
1020 ath_update_stats(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
1022 struct ath_tx_desc *ds = bf->bf_desc;
1025 if (ds->ds_txstat.ts_status == 0) {
1026 if (ds->ds_txstat.ts_rate & HAL_TXSTAT_ALTRATE)
1027 sc->sc_tx_stats.ast_tx_altrate++;
1029 if (ds->ds_txstat.ts_status & HAL_TXERR_XRETRY)
1030 sc->sc_tx_stats.ast_tx_xretries++;
1031 if (ds->ds_txstat.ts_status & HAL_TXERR_FIFO)
1032 sc->sc_tx_stats.ast_tx_fifoerr++;
1033 if (ds->ds_txstat.ts_status & HAL_TXERR_FILT)
1034 sc->sc_tx_stats.ast_tx_filtered++;
1035 if (ds->ds_txstat.ts_status & HAL_TXERR_TIMER_EXPIRED)
1036 sc->sc_tx_stats.ast_tx_timer_exp++;
1038 sr = ds->ds_txstat.ts_shortretry;
1039 lr = ds->ds_txstat.ts_longretry;
1040 sc->sc_tx_stats.ast_tx_shortretry += sr;
1041 sc->sc_tx_stats.ast_tx_longretry += lr;
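/*
 * Transmit a management frame handed down over HTC: parse the ath_mgt_hdr,
 * pick the VAP's minimum rate, build protection/CTS duration by hand and
 * queue the frame on sc_txq[1].
 */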
1045 ath_tgt_send_mgt(struct ath_softc_tgt *sc,adf_nbuf_t hdr_buf, adf_nbuf_t skb,
1046 HTC_ENDPOINT_ID endpt)
1048 struct ieee80211_node_target *ni;
1049 struct ieee80211vap_target *vap;
1050 struct ath_vap_target *avp;
1051 struct ath_hal *ah = sc->sc_ah;
1052 a_uint8_t rix, txrate, ctsrate, cix = 0xff, *data;
1053 a_uint32_t ivlen = 0, icvlen = 0, subtype, flags, ctsduration;
1054 a_int32_t i, iswep, ismcast, hdrlen, pktlen, try0, len;
1055 struct ath_tx_desc *ds = NULL;
1056 struct ath_txq *txq = NULL;
1057 struct ath_tx_buf *bf;
1059 const HAL_RATE_TABLE *rt;
1060 HAL_BOOL shortPreamble;
1061 struct ieee80211_frame *wh;
1062 struct ath_rc_series rcs[4];
1063 HAL_11N_RATE_SERIES series[4];
1068 adf_nbuf_peek_header(skb, &data, &len);
1069 adf_nbuf_pull_head(skb, sizeof(ath_mgt_hdr_t));
1071 adf_nbuf_peek_header(hdr_buf, &data, &len);
1074 adf_os_assert(len >= sizeof(ath_mgt_hdr_t));
1076 mh = (ath_mgt_hdr_t *)data;
1077 adf_nbuf_peek_header(skb, &data, &len);
1078 wh = (struct ieee80211_frame *)data;
1080 adf_os_mem_set(rcs, 0, sizeof(struct ath_rc_series)*4);
1081 adf_os_mem_set(series, 0, sizeof(HAL_11N_RATE_SERIES)*4);
1083 bf = asf_tailq_first(&sc->sc_txbuf);
1087 asf_tailq_remove(&sc->sc_txbuf, bf, bf_list);
1089 ni = ath_tgt_find_node(sc, mh->ni_index);
1093 bf->bf_endpt = endpt;
1094 bf->bf_cookie = mh->cookie;
1095 bf->bf_protmode = mh->flags & (IEEE80211_PROT_RTSCTS | IEEE80211_PROT_CTSONLY);
1096 txq = &sc->sc_txq[1];
1097 iswep = wh->i_fc[1] & IEEE80211_FC1_WEP;
1098 ismcast = IEEE80211_IS_MULTICAST(wh->i_addr1);
1099 hdrlen = ieee80211_anyhdrsize(wh);
1101 keyix = HAL_TXKEYIX_INVALID;
1102 pktlen -= (hdrlen & 3);
1103 pktlen += IEEE80211_CRC_LEN;
1108 adf_nbuf_map(sc->sc_dev, bf->bf_dmamap, skb, ADF_OS_DMA_TO_DEVICE);
1111 adf_nbuf_queue_add(&bf->bf_skbhead, skb);
1114 rt = sc->sc_currates;
1115 adf_os_assert(rt != NULL);
1117 if (mh->flags == ATH_SHORT_PREAMBLE)
1118 shortPreamble = AH_TRUE;
1120 shortPreamble = AH_FALSE;
1122 flags = HAL_TXDESC_CLRDMASK;
1124 switch (wh->i_fc[0] & IEEE80211_FC0_TYPE_MASK) {
1125 case IEEE80211_FC0_TYPE_MGT:
1126 subtype = wh->i_fc[0] & IEEE80211_FC0_SUBTYPE_MASK;
1128 if (subtype == IEEE80211_FC0_SUBTYPE_PROBE_RESP)
1129 atype = HAL_PKT_TYPE_PROBE_RESP;
1130 else if (subtype == IEEE80211_FC0_SUBTYPE_ATIM)
1131 atype = HAL_PKT_TYPE_ATIM;
1133 atype = HAL_PKT_TYPE_NORMAL;
1137 atype = HAL_PKT_TYPE_NORMAL;
1141 avp = &sc->sc_vap[mh->vap_index];
1143 rcs[0].rix = ath_get_minrateidx(sc, avp);
1144 rcs[0].tries = ATH_TXMAXTRY;
1147 adf_os_mem_copy(bf->bf_rcs, rcs, sizeof(rcs));
1149 try0 = rcs[0].tries;
1150 txrate = rt->info[rix].rateCode;
1153 txrate |= rt->info[rix].shortPreamble;
1160 flags |= HAL_TXDESC_NOACK;
1162 } else if (pktlen > vap->iv_rtsthreshold) {
1163 flags |= HAL_TXDESC_RTSENA;
1164 cix = rt->info[rix].controlRate;
1167 if ((bf->bf_protmode != IEEE80211_PROT_NONE) &&
1168 rt->info[rix].phy == IEEE80211_T_OFDM &&
1169 (flags & HAL_TXDESC_NOACK) == 0) {
1170 cix = rt->info[sc->sc_protrix].controlRate;
1171 sc->sc_tx_stats.ast_tx_protect++;
1174 *(a_uint16_t *)&wh->i_seq[0] = adf_os_cpu_to_le16(ni->ni_txseqmgmt <<
1175 IEEE80211_SEQ_SEQ_SHIFT);
1176 INCR(ni->ni_txseqmgmt, IEEE80211_SEQ_MAX);
1179 if (flags & (HAL_TXDESC_RTSENA|HAL_TXDESC_CTSENA)) {
1180 adf_os_assert(cix != 0xff);
1181 ctsrate = rt->info[cix].rateCode;
1182 if (shortPreamble) {
1183 ctsrate |= rt->info[cix].shortPreamble;
1184 if (flags & HAL_TXDESC_RTSENA) /* SIFS + CTS */
1185 ctsduration += rt->info[cix].spAckDuration;
1186 if ((flags & HAL_TXDESC_NOACK) == 0) /* SIFS + ACK */
1187 ctsduration += rt->info[cix].spAckDuration;
1189 if (flags & HAL_TXDESC_RTSENA) /* SIFS + CTS */
1190 ctsduration += rt->info[cix].lpAckDuration;
1191 if ((flags & HAL_TXDESC_NOACK) == 0) /* SIFS + ACK */
1192 ctsduration += rt->info[cix].lpAckDuration;
1194 ctsduration += ath_hal_computetxtime(ah,
1195 rt, pktlen, rix, shortPreamble);
1200 flags |= HAL_TXDESC_INTREQ;
1202 ath_hal_setuptxdesc(ah, ds
1215 , ATH_COMP_PROC_NO_COMP_NO_CCS);
1217 bf->bf_flags = flags;
1220 * Set the key type in the tx descriptor when sending the encrypted challenge
1221 * to the AP in Auth frame 3 of Shared Key authentication; Owl needs this.
1223 if (iswep && (keyix != HAL_TXKEYIX_INVALID) &&
1224 (wh->i_fc[0] & IEEE80211_FC0_SUBTYPE_MASK) == IEEE80211_FC0_SUBTYPE_AUTH)
1225 ath_hal_fillkeytxdesc(ah, ds, mh->keytype);
1227 ath_filltxdesc(sc, bf);
1229 for (i = 0; i < 4; i++) {
1230 series[i].Tries = 2;
1231 series[i].Rate = txrate;
1232 series[i].ChSel = sc->sc_ic.ic_tx_chainmask;
1233 series[i].RateFlags = 0;
1235 ah->ah_set11nRateScenario(ah, ds, 0, ctsrate, ctsduration, series, 4, 0);
1236 ath_tgt_txqaddbuf(sc, txq, bf, bf->bf_lastds);
1240 HTC_ReturnBuffers(sc->tgt_htc_handle, endpt, skb);
1245 ath_tgt_txqaddbuf(struct ath_softc_tgt *sc,
1246 struct ath_txq *txq, struct ath_tx_buf *bf,
1247 struct ath_tx_desc *lastds)
1249 struct ath_hal *ah = sc->sc_ah;
1251 ATH_TXQ_INSERT_TAIL(txq, bf, bf_list);
1253 if (txq->axq_link == NULL) {
1254 ah->ah_setTxDP(ah, txq->axq_qnum, ATH_BUF_GET_DESC_PHY_ADDR(bf));
1256 *txq->axq_link = ATH_BUF_GET_DESC_PHY_ADDR(bf);
1259 txq->axq_link = &lastds->ds_link;
1260 ah->ah_startTxDma(ah, txq->axq_qnum);
1263 void ath_tgt_handle_normal(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
1266 struct ath_node_target *an;
1268 an = (struct ath_node_target *)bf->bf_node;
1271 tid = &an->tid[bf->bf_tidno];
1274 bf->bf_comp = ath_tgt_tx_comp_normal;
1275 INCR(tid->seq_start, IEEE80211_SEQ_MAX);
1276 ath_tgt_tx_send_normal(sc, bf);
1280 ath_tgt_tx_enqueue(struct ath_txq *txq, struct ath_atx_tid *tid)
1288 tid->sched = AH_TRUE;
1289 asf_tailq_insert_tail(&txq->axq_tidq, tid, tid_qelem);
1293 ath_tgt_txq_schedule(struct ath_softc_tgt *sc, struct ath_txq *txq)
1295 struct ath_atx_tid *tid;
1301 TAILQ_DEQ(&txq->axq_tidq, tid, tid_qelem);
1306 tid->sched = AH_FALSE;
1311 if (!(tid->flag & TID_AGGR_ENABLED))
1312 ath_tgt_tx_sched_normal(sc, tid);
1314 ath_tgt_tx_sched_aggr(sc, tid);
1318 if (!asf_tailq_empty(&tid->buf_q)) {
1319 ath_tgt_tx_enqueue(txq, tid);
1322 } while (!asf_tailq_empty(&txq->axq_tidq) && !bdone);
1326 ath_tgt_handle_aggr(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
1329 struct ath_node_target *an;
1330 struct ath_txq *txq = bf->bf_txq;
1331 a_bool_t queue_frame, within_baw;
1333 an = (struct ath_node_target *)bf->bf_node;
1336 tid = &an->tid[bf->bf_tidno];
1339 bf->bf_comp = ath_tgt_tx_comp_aggr;
1341 within_baw = BAW_WITHIN(tid->seq_start, tid->baw_size,
1342 SEQNO_FROM_BF_SEQNO(bf->bf_seqno));
1344 queue_frame = ( (txq->axq_depth >= ATH_AGGR_MIN_QDEPTH) ||
1345 (!asf_tailq_empty(&tid->buf_q)) ||
1346 (tid->paused) || (!within_baw) );
1349 asf_tailq_insert_tail(&tid->buf_q, bf, bf_list);
1350 ath_tgt_tx_enqueue(txq, tid);
1352 ath_tx_addto_baw(tid, bf);
1353 __stats(sc, txaggr_nframes);
1354 ath_tgt_tx_send_normal(sc, bf);
1359 ath_tgt_tx_sched_normal(struct ath_softc_tgt *sc, ath_atx_tid_t *tid)
1361 struct ath_tx_buf *bf;
1362 struct ath_txq *txq = TID_TO_ACTXQ(tid->tidno);
1365 if (asf_tailq_empty(&tid->buf_q))
1368 bf = asf_tailq_first(&tid->buf_q);
1369 asf_tailq_remove(&tid->buf_q, bf, bf_list);
1370 ath_tgt_tx_send_normal(sc, bf);
1372 } while (txq->axq_depth < ATH_AGGR_MIN_QDEPTH);
1376 ath_tgt_tx_sched_aggr(struct ath_softc_tgt *sc, ath_atx_tid_t *tid)
1378 struct ath_tx_buf *bf, *bf_last;
1379 ATH_AGGR_STATUS status;
1380 ath_tx_bufhead bf_q;
1381 struct ath_txq *txq = TID_TO_ACTXQ(tid->tidno);
1382 struct ath_tx_desc *ds = NULL;
1383 struct ath_hal *ah = sc->sc_ah;
1387 if (asf_tailq_empty(&tid->buf_q))
1391 if (asf_tailq_empty(&tid->buf_q))
1394 asf_tailq_init(&bf_q);
1396 status = ath_tgt_tx_form_aggr(sc, tid, &bf_q);
1398 if (asf_tailq_empty(&bf_q))
1401 bf = asf_tailq_first(&bf_q);
1402 bf_last = asf_tailq_last(&bf_q, ath_tx_bufhead_s);
1404 if (bf->bf_nframes == 1) {
1406 if(bf->bf_retries == 0)
1407 __stats(sc, txaggr_single);
1409 bf->bf_lastds = &(bf->bf_descarr[bf->bf_dmamap_info.nsegs -1]);
1410 bf->bf_lastds->ds_link = 0;
1413 for (ds = bf->bf_desc; ds <= bf->bf_lastds; ds++)
1414 ah->ah_clr11nAggr(ah, ds);
1416 ath_buf_set_rate(sc, bf);
1417 bf->bf_txq_add(sc, bf);
1422 bf_last->bf_next = NULL;
1423 bf_last->bf_lastds->ds_link = 0;
1424 bf_last->bf_ndelim = 0;
1427 ath_buf_set_rate(sc, bf);
1428 ah->ah_set11nAggrFirst(ah, bf->bf_desc, bf->bf_al,
1430 bf->bf_lastds = bf_last->bf_lastds;
1432 for (i = 0; i < bf_last->bf_dmamap_info.nsegs; i++)
1433 ah->ah_set11nAggrLast(ah, &bf_last->bf_descarr[i]);
1435 if (status == ATH_AGGR_8K_LIMITED) {
1440 bf->bf_txq_add(sc, bf);
1441 } while (txq->axq_depth < ATH_AGGR_MIN_QDEPTH &&
1442 status != ATH_TGT_AGGR_BAW_CLOSED);
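/*
 * Derive the A-MPDU length limit for this buffer from the selected rate
 * series (the 4 ms rule), scaled down when coexistence is enabled and
 * clamped to the peer's advertised maximum A-MPDU size; probe and legacy
 * rates bail out early.
 */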
1445 static u_int32_t ath_lookup_rate(struct ath_softc_tgt *sc,
1446 struct ath_node_target *an,
1447 struct ath_tx_buf *bf)
1450 u_int32_t max4msframelen, frame_length;
1451 u_int16_t aggr_limit, legacy=0;
1452 const HAL_RATE_TABLE *rt = sc->sc_currates;
1453 struct ieee80211_node_target *ieee_node = (struct ieee80211_node_target *)an;
1455 if (bf->bf_ismcast) {
1456 bf->bf_rcs[1].tries = bf->bf_rcs[2].tries = bf->bf_rcs[3].tries = 0;
1457 bf->bf_rcs[0].rix = 0xb;
1458 bf->bf_rcs[0].tries = ATH_TXMAXTRY - 1;
1459 bf->bf_rcs[0].flags = 0;
1461 ath_tgt_rate_findrate(sc, an, AH_TRUE, 0, ATH_TXMAXTRY-1, 4, 1,
1462 ATH_RC_PROBE_ALLOWED, bf->bf_rcs, &prate);
1465 max4msframelen = IEEE80211_AMPDU_LIMIT_MAX;
1467 for (i = 0; i < 4; i++) {
1468 if (bf->bf_rcs[i].tries) {
1469 frame_length = bf->bf_rcs[i].max4msframelen;
1471 if (rt->info[bf->bf_rcs[i].rix].phy != IEEE80211_T_HT) {
1476 max4msframelen = ATH_MIN(max4msframelen, frame_length);
1480 if (prate || legacy)
1483 if (sc->sc_ic.ic_enable_coex)
1484 aggr_limit = ATH_MIN((max4msframelen*3)/8, sc->sc_ic.ic_ampdu_limit);
1486 aggr_limit = ATH_MIN(max4msframelen, sc->sc_ic.ic_ampdu_limit);
1488 if (ieee_node->ni_maxampdu)
1489 aggr_limit = ATH_MIN(aggr_limit, ieee_node->ni_maxampdu);
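/*
 * Pull frames off the TID queue into one aggregate: stop at the block-ack
 * window edge, the rate-derived length limit or the subframe cap; add each
 * frame to the BAW, pad with delimiters and chain its descriptors, marking
 * every subframe as aggregate "middle" (first/last are fixed up by the
 * caller, ath_tgt_tx_sched_aggr).
 */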
1494 int ath_tgt_tx_form_aggr(struct ath_softc_tgt *sc, ath_atx_tid_t *tid,
1495 ath_tx_bufhead *bf_q)
1497 struct ath_tx_buf *bf_first, *bf_prev = NULL;
1498 int nframes = 0, rl = 0;
1499 struct ath_tx_desc *ds = NULL;
1500 struct ath_tx_buf *bf;
1501 struct ath_hal *ah = sc->sc_ah;
1502 u_int16_t aggr_limit = (64*1024 -1), al = 0, bpad = 0, al_delta;
1503 u_int16_t h_baw = tid->baw_size/2, prev_al = 0, prev_frames = 0;
1505 bf_first = asf_tailq_first(&tid->buf_q);
1508 bf = asf_tailq_first(&tid->buf_q);
1511 if (!BAW_WITHIN(tid->seq_start, tid->baw_size,
1512 SEQNO_FROM_BF_SEQNO(bf->bf_seqno))) {
1514 bf_first->bf_al = al;
1515 bf_first->bf_nframes = nframes;
1516 return ATH_TGT_AGGR_BAW_CLOSED;
1520 aggr_limit = ath_lookup_rate(sc, tid->an, bf);
1524 al_delta = ATH_AGGR_DELIM_SZ + bf->bf_pktlen;
1526 if (nframes && (aggr_limit < (al + bpad + al_delta + prev_al))) {
1527 bf_first->bf_al = al;
1528 bf_first->bf_nframes = nframes;
1529 return ATH_TGT_AGGR_LIMITED;
1533 if ((nframes + prev_frames) >= ATH_MIN((h_baw), 17)) {
1535 if ((nframes + prev_frames) >= ATH_MIN((h_baw), 22)) {
1537 bf_first->bf_al = al;
1538 bf_first->bf_nframes = nframes;
1539 return ATH_TGT_AGGR_LIMITED;
1542 ath_tx_addto_baw(tid, bf);
1543 asf_tailq_remove(&tid->buf_q, bf, bf_list);
1544 asf_tailq_insert_tail(bf_q, bf, bf_list);
1549 adf_os_assert(bf->bf_comp == ath_tgt_tx_comp_aggr);
1551 al += bpad + al_delta;
1552 bf->bf_ndelim = ATH_AGGR_GET_NDELIM(bf->bf_pktlen);
1554 switch (bf->bf_keytype) {
1555 case HAL_KEY_TYPE_AES:
1556 bf->bf_ndelim += ATH_AGGR_ENCRYPTDELIM;
1558 case HAL_KEY_TYPE_WEP:
1559 case HAL_KEY_TYPE_TKIP:
1560 bf->bf_ndelim += 64;
1562 case HAL_KEY_TYPE_WAPI:
1563 bf->bf_ndelim += 12;
1569 bpad = PADBYTES(al_delta) + (bf->bf_ndelim << 2);
1572 bf_prev->bf_next = bf;
1573 bf_prev->bf_lastds->ds_link = ATH_BUF_GET_DESC_PHY_ADDR(bf);
1577 for (ds = bf->bf_desc; ds <= bf->bf_lastds; ds++)
1578 ah->ah_set11nAggrMiddle(ah, ds, bf->bf_ndelim);
1580 } while (!asf_tailq_empty(&tid->buf_q));
1582 bf_first->bf_al = al;
1583 bf_first->bf_nframes = nframes;
1585 return ATH_TGT_AGGR_DONE;
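/*
 * Record a (non-retried) subframe's sequence number in the TID's block-ack
 * window bitmap, extending the window tail if needed.
 */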
1588 void ath_tx_addto_baw(ath_atx_tid_t *tid, struct ath_tx_buf *bf)
1592 if (bf->bf_isretried) {
1596 index = ATH_BA_INDEX(tid->seq_start, SEQNO_FROM_BF_SEQNO(bf->bf_seqno));
1597 cindex = (tid->baw_head + index) & (ATH_TID_MAX_BUFS - 1);
1599 TX_BUF_BITMAP_SET(tid->tx_buf_bitmap, cindex);
1601 if (index >= ((tid->baw_tail - tid->baw_head) & (ATH_TID_MAX_BUFS - 1))) {
1602 tid->baw_tail = cindex;
1603 INCR(tid->baw_tail, ATH_TID_MAX_BUFS);
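/*
 * Completion handler for an aggregate: walk the subframes against the
 * block-ack bitmap, free the acknowledged ones, queue failed ones for
 * retry (possibly sending a BAR), update the BAW and report the outcome
 * to rate control.
 */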
1607 void ath_tgt_tx_comp_aggr(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
1609 struct ath_node_target *an = ATH_NODE_TARGET(bf->bf_node);
1610 ath_atx_tid_t *tid = ATH_AN_2_TID(an, bf->bf_tidno);
1611 struct ath_tx_desc lastds;
1612 struct ath_tx_desc *ds = &lastds;
1613 struct ath_rc_series rcs[4];
1618 int nframes = bf->bf_nframes;
1619 struct ath_tx_buf *bf_next;
1620 ath_tx_bufhead bf_q;
1622 struct ath_tx_buf *bar = NULL;
1623 struct ath_txq *txq;
1627 if (tid->flag & TID_CLEANUP_INPROGRES) {
1628 ath_tx_comp_cleanup(sc, bf);
1632 adf_os_mem_copy(ds, bf->bf_lastds, sizeof (struct ath_tx_desc));
1633 adf_os_mem_copy(rcs, bf->bf_rcs, sizeof(rcs));
1635 if (ds->ds_txstat.ts_flags == HAL_TX_SW_FILTERED) {
1640 if (!bf->bf_isaggr) {
1641 ath_tx_comp_unaggr(sc, bf);
1645 __stats(sc, tx_compaggr);
1647 asf_tailq_init(&bf_q);
1649 seq_st = ATH_DS_BA_SEQ(ds);
1650 ba = ATH_DS_BA_BITMAP(ds);
1651 tx_ok = (ATH_DS_TX_STATUS(ds) == HAL_OK);
1653 if (ATH_DS_TX_STATUS(ds) & HAL_TXERR_XRETRY) {
1654 ath_tx_comp_aggr_error(sc, bf, tid);
1658 if (tx_ok && !ATH_DS_TX_BA(ds)) {
1659 __stats(sc, txaggr_babug);
1660 adf_os_print("BA Bug?\n");
1661 ath_tx_comp_aggr_error(sc, bf, tid);
1666 ba_index = ATH_BA_INDEX(seq_st, SEQNO_FROM_BF_SEQNO(bf->bf_seqno));
1667 bf_next = bf->bf_next;
1669 if (tx_ok && ATH_BA_ISSET(ba, ba_index)) {
1670 __stats(sc, txaggr_compgood);
1671 ath_tx_update_baw(tid, SEQNO_FROM_BF_SEQNO(bf->bf_seqno));
1672 ath_tx_status_update_aggr(sc, bf, ds, rcs, 1);
1673 ath_tx_freebuf(sc, bf);
1675 ath_tx_retry_subframe(sc, bf, &bf_q, &bar);
1681 ath_update_aggr_stats(sc, ds, nframes, nbad);
1682 ath_rate_tx_complete(sc, an, ds, rcs, nframes, nbad);
1685 ath_bar_tx(sc, tid, bar);
1688 if (!asf_tailq_empty(&bf_q)) {
1689 __stats(sc, txaggr_prepends);
1690 TAILQ_INSERTQ_HEAD(&tid->buf_q, &bf_q, bf_list);
1691 ath_tgt_tx_enqueue(txq, tid);
1696 ath_tx_comp_aggr_error(struct ath_softc_tgt *sc, struct ath_tx_buf *bf,
1701 struct ath_tx_desc lastds;
1702 struct ath_tx_desc *ds = &lastds;
1703 struct ath_rc_series rcs[4];
1704 struct ath_tx_buf *bar = NULL;
1705 struct ath_tx_buf *bf_next;
1706 int nframes = bf->bf_nframes;
1707 ath_tx_bufhead bf_q;
1708 struct ath_txq *txq;
1710 asf_tailq_init(&bf_q);
1713 adf_os_mem_copy(ds, bf->bf_lastds, sizeof (struct ath_tx_desc));
1714 adf_os_mem_copy(rcs, bf->bf_rcs, sizeof(rcs));
1717 bf_next = bf->bf_next;
1718 ath_tx_retry_subframe(sc, bf, &bf_q, &bar);
1722 ath_update_aggr_stats(sc, ds, nframes, nframes);
1723 ath_rate_tx_complete(sc, tid->an, ds, rcs, nframes, nframes);
1726 ath_bar_tx(sc, tid, bar);
1729 if (!asf_tailq_empty(&bf_q)) {
1730 __stats(sc, txaggr_prepends);
1731 TAILQ_INSERTQ_HEAD(&tid->buf_q, &bf_q, bf_list);
1732 ath_tgt_tx_enqueue(txq, tid);
1737 ath_tx_comp_cleanup(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
1740 struct ath_node_target *an = ATH_NODE_TARGET(bf->bf_node);
1741 ath_atx_tid_t *tid = ATH_AN_2_TID(an, bf->bf_tidno);
1742 struct ath_tx_desc lastds;
1743 struct ath_tx_desc *ds = &lastds;
1744 struct ath_rc_series rcs[4];
1749 int nframes = bf->bf_nframes;
1750 struct ath_tx_buf *bf_next;
1753 adf_os_mem_copy(ds, bf->bf_lastds, sizeof (struct ath_tx_desc));
1754 adf_os_mem_copy(rcs, bf->bf_rcs, sizeof(rcs));
1756 seq_st = ATH_DS_BA_SEQ(ds);
1757 ba = ATH_DS_BA_BITMAP(ds);
1758 tx_ok = (ATH_DS_TX_STATUS(ds) == HAL_OK);
1760 if (!bf->bf_isaggr) {
1761 ath_update_stats(sc, bf);
1763 __stats(sc, tx_compunaggr);
1765 ath_tx_status_update(sc, bf);
1767 ath_tx_freebuf(sc, bf);
1769 if (tid->flag & TID_CLEANUP_INPROGRES) {
1770 owl_tgt_tid_cleanup(sc, tid);
1778 ba_index = ATH_BA_INDEX(seq_st, SEQNO_FROM_BF_SEQNO(bf->bf_seqno));
1779 bf_next = bf->bf_next;
1781 ath_tx_status_update_aggr(sc, bf, ds, rcs, 0);
1783 ath_tx_freebuf(sc, bf);
1787 tid->flag &= ~TID_CLEANUP_INPROGRES;
1788 ath_aggr_resume_tid(sc, tid);
1795 ath_update_aggr_stats(sc, ds, nframes, nbad);
1796 ath_rate_tx_complete(sc, an, ds, rcs, nframes, nbad);
1800 ath_tx_retry_subframe(struct ath_softc_tgt *sc, struct ath_tx_buf *bf,
1801 ath_tx_bufhead *bf_q, struct ath_tx_buf **bar)
1804 struct ath_node_target *an = ATH_NODE_TARGET(bf->bf_node);
1805 ath_atx_tid_t *tid = ATH_AN_2_TID(an, bf->bf_tidno);
1806 struct ath_tx_desc *ds = NULL;
1807 struct ath_hal *ah = sc->sc_ah;
1810 __stats(sc, txaggr_compretries);
1812 for (ds = bf->bf_desc, i = 0; i < bf->bf_dmamap_info.nsegs; ds++, i++) {
1813 ah->ah_clr11nAggr(ah, ds);
1814 ah->ah_set11nBurstDuration(ah, ds, 0);
1815 ath_hal_set11n_virtualmorefrag(sc->sc_ah, ds, 0);
1818 if (bf->bf_retries >= OWLMAX_RETRIES) {
1819 __stats(sc, txaggr_xretries);
1820 ath_tx_update_baw(tid, SEQNO_FROM_BF_SEQNO(bf->bf_seqno));
1821 ath_tx_status_update_aggr(sc, bf, bf->bf_lastds, NULL, 0);
1826 ath_tx_freebuf(sc, bf);
1831 __stats(sc, txaggr_errlast);
1832 bf = ath_buf_toggle(sc, bf, 1);
1834 bf->bf_lastds = &(bf->bf_descarr[bf->bf_dmamap_info.nsegs - 1]);
1836 ath_tx_set_retry(sc, bf);
1837 asf_tailq_insert_tail(bf_q, bf, bf_list);
1841 ath_update_aggr_stats(struct ath_softc_tgt *sc,
1842 struct ath_tx_desc *ds, int nframes,
1846 u_int8_t status = ATH_DS_TX_STATUS(ds);
1847 u_int8_t txflags = ATH_DS_TX_FLAGS(ds);
1849 __statsn(sc, txaggr_longretries, ds->ds_txstat.ts_longretry);
1850 __statsn(sc, txaggr_shortretries, ds->ds_txstat.ts_shortretry);
1852 if (txflags & HAL_TX_DESC_CFG_ERR)
1853 __stats(sc, txaggr_desc_cfgerr);
1855 if (txflags & HAL_TX_DATA_UNDERRUN)
1856 __stats(sc, txaggr_data_urun);
1858 if (txflags & HAL_TX_DELIM_UNDERRUN)
1859 __stats(sc, txaggr_delim_urun);
1865 if (status & HAL_TXERR_XRETRY)
1866 __stats(sc, txaggr_compxretry);
1868 if (status & HAL_TXERR_FILT)
1869 __stats(sc, txaggr_filtered);
1871 if (status & HAL_TXERR_FIFO)
1872 __stats(sc, txaggr_fifo);
1874 if (status & HAL_TXERR_XTXOP)
1875 __stats(sc, txaggr_xtxop);
1877 if (status & HAL_TXERR_TIMER_EXPIRED)
1878 __stats(sc, txaggr_timer_exp);
1882 ath_tx_comp_unaggr(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
1884 struct ath_node_target *an = ATH_NODE_TARGET(bf->bf_node);
1885 ath_atx_tid_t *tid = ATH_AN_2_TID(an, bf->bf_tidno);
1886 struct ath_tx_desc *ds = bf->bf_lastds;
1888 ath_update_stats(sc, bf);
1889 ath_rate_tx_complete(sc, an, ds, bf->bf_rcs, 1, 0);
1891 if (ATH_DS_TX_STATUS(ds) & HAL_TXERR_XRETRY) {
1892 ath_tx_retry_unaggr(sc, bf);
1895 __stats(sc, tx_compunaggr);
1897 ath_tx_update_baw(tid, SEQNO_FROM_BF_SEQNO(bf->bf_seqno));
1898 ath_tx_status_update(sc, bf);
1899 ath_tx_freebuf(sc, bf);
1903 ath_tx_retry_unaggr(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
1905 struct ath_node_target *an = ATH_NODE_TARGET(bf->bf_node);
1906 ath_atx_tid_t *tid = ATH_AN_2_TID(an, bf->bf_tidno);
1907 struct ath_txq *txq;
1911 if (bf->bf_retries >= OWLMAX_RETRIES) {
1912 __stats(sc, txunaggr_xretry);
1913 ath_tx_update_baw(tid, SEQNO_FROM_BF_SEQNO(bf->bf_seqno));
1914 ath_tx_status_update(sc, bf);
1915 ath_bar_tx(sc, tid, bf);
1919 __stats(sc, txunaggr_compretries);
1920 if (!bf->bf_lastds->ds_link) {
1921 __stats(sc, txunaggr_errlast);
1922 bf = ath_buf_toggle(sc, bf, 1);
1925 ath_tx_set_retry(sc, bf);
1926 asf_tailq_insert_head(&tid->buf_q, bf, bf_list);
1927 ath_tgt_tx_enqueue(txq, tid);
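/*
 * Clear a completed sequence number from the block-ack window and slide
 * the window start forward past contiguous completed slots.
 */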
1931 ath_tx_update_baw(ath_atx_tid_t *tid, int seqno)
1936 index = ATH_BA_INDEX(tid->seq_start, seqno);
1937 cindex = (tid->baw_head + index) & (ATH_TID_MAX_BUFS - 1);
1939 TX_BUF_BITMAP_CLR(tid->tx_buf_bitmap, cindex);
1941 while (tid->baw_head != tid->baw_tail &&
1942 (!TX_BUF_BITMAP_IS_SET(tid->tx_buf_bitmap, tid->baw_head))) {
1943 INCR(tid->seq_start, IEEE80211_SEQ_MAX);
1944 INCR(tid->baw_head, ATH_TID_MAX_BUFS);
1948 static void ath_tx_set_retry(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
1950 struct ieee80211_frame *wh;
1952 __stats(sc, txaggr_retries);
1954 bf->bf_isretried = 1;
1956 wh = ATH_SKB_2_WH(bf->bf_skb);
1957 wh->i_fc[1] |= IEEE80211_FC1_RETRY;
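/*
 * Tear down aggregation for a TID: free (or demote to normal completion)
 * queued frames, then walk the BAW; if subframes are still outstanding,
 * TID_CLEANUP_INPROGRES defers the rest to the completion path.
 */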
1960 void ath_tgt_tx_cleanup(struct ath_softc_tgt *sc, struct ath_node_target *an,
1961 ath_atx_tid_t *tid, a_uint8_t discard_all)
1963 struct ath_tx_buf *bf;
1964 struct ath_tx_buf *bf_next;
1965 struct ath_txq *txq;
1967 txq = TID_TO_ACTXQ(tid->tidno);
1969 bf = asf_tailq_first(&tid->buf_q);
1972 if (discard_all || bf->bf_isretried) {
1973 bf_next = asf_tailq_next(bf, bf_list);
1974 TAILQ_DEQ(&tid->buf_q, bf, bf_list);
1975 if (bf->bf_isretried)
1976 ath_tx_update_baw(tid, SEQNO_FROM_BF_SEQNO(bf->bf_seqno));
1977 ath_tx_freebuf(sc, bf);
1981 bf->bf_comp = ath_tgt_tx_comp_normal;
1982 bf = asf_tailq_next(bf, bf_list);
1985 ath_aggr_pause_tid(sc, tid);
1987 while (tid->baw_head != tid->baw_tail) {
1988 if (TX_BUF_BITMAP_IS_SET(tid->tx_buf_bitmap, tid->baw_head)) {
1990 tid->flag |= TID_CLEANUP_INPROGRES;
1991 TX_BUF_BITMAP_CLR(tid->tx_buf_bitmap, tid->baw_head);
1993 INCR(tid->baw_head, ATH_TID_MAX_BUFS);
1994 INCR(tid->seq_start, IEEE80211_SEQ_MAX);
1997 if (!(tid->flag & TID_CLEANUP_INPROGRES)) {
1998 ath_aggr_resume_tid(sc, tid);
2002 /******************/
2003 /* BAR Management */
2004 /******************/
2006 static void ath_tgt_delba_send(struct ath_softc_tgt *sc,
2007 struct ieee80211_node_target *ni,
2008 a_uint8_t tidno, a_uint8_t initiator,
2009 a_uint16_t reasoncode)
2011 struct ath_node_target *an = ATH_NODE_TARGET(ni);
2012 ath_atx_tid_t *tid = ATH_AN_2_TID(an, tidno);
2013 struct wmi_data_delba wmi_delba;
2015 tid->flag &= ~TID_AGGR_ENABLED;
2017 ath_tgt_tx_cleanup(sc, an, tid, 1);
2019 wmi_delba.ni_nodeindex = ni->ni_nodeindex;
2020 wmi_delba.tidno = tid->tidno;
2021 wmi_delba.initiator = 1;
2022 wmi_delba.reasoncode = IEEE80211_REASON_UNSPECIFIED;
2024 __stats(sc, txbar_xretry);
2025 wmi_event(sc->tgt_wmi_handle,
2031 static void ath_bar_retry(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
2033 struct ath_node_target *an = ATH_NODE_TARGET(bf->bf_node);
2034 ath_atx_tid_t *tid = ATH_AN_2_TID(an, bf->bf_tidno);
2036 if (bf->bf_retries >= OWLMAX_BAR_RETRIES) {
2037 ath_tgt_delba_send(sc, bf->bf_node, tid->tidno, 1,
2038 IEEE80211_REASON_UNSPECIFIED);
2039 ath_tgt_tid_drain(sc, tid);
2042 ath_buf_comp(sc, bf);
2046 __stats(sc, txbar_compretries);
2048 if (!bf->bf_lastds->ds_link) {
2049 __stats(sc, txbar_errlast);
2050 bf = ath_buf_toggle(sc, bf, 1);
2053 bf->bf_lastds->ds_link = 0;
2055 ath_tx_set_retry(sc, bf);
2056 ath_tgt_txq_add_ucast(sc, bf);
2059 static void ath_bar_tx_comp(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
2061 struct ath_tx_desc *ds = bf->bf_lastds;
2062 struct ath_node_target *an;
2064 struct ath_txq *txq;
2066 an = (struct ath_node_target *)bf->bf_node;
2067 tid = &an->tid[bf->bf_tidno];
2068 txq = TID_TO_ACTXQ(tid->tidno);
2070 if (ATH_DS_TX_STATUS(ds) & HAL_TXERR_XRETRY) {
2071 ath_bar_retry(sc, bf);
2075 ath_aggr_resume_tid(sc, tid);
2078 ath_buf_comp(sc, bf);
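/*
 * Rewrite the failed buffer's skb into a BAR frame for this TID (sequence
 * set to the current window start), pause aggregation and send it as a
 * unicast frame; ath_bar_tx_comp() resumes the TID or retries the BAR.
 */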
2081 static void ath_bar_tx(struct ath_softc_tgt *sc,
2082 ath_atx_tid_t *tid, struct ath_tx_buf *bf)
2085 struct ieee80211_frame_bar *bar;
2087 struct ath_tx_desc *ds, *ds0;
2088 struct ath_hal *ah = sc->sc_ah;
2089 HAL_11N_RATE_SERIES series[4];
2091 adf_nbuf_queue_t skbhead;
2095 __stats(sc, tx_bars);
2097 adf_os_mem_set(&series, 0, sizeof(series));
2099 ath_aggr_pause_tid(sc, tid);
2101 skb = adf_nbuf_queue_remove(&bf->bf_skbhead);
2102 adf_nbuf_peek_header(skb, &anbdata, &anblen);
2103 adf_nbuf_trim_tail(skb, anblen);
2104 bar = (struct ieee80211_frame_bar *) anbdata;
2108 ath_dma_unmap(sc, bf);
2109 adf_nbuf_queue_add(&bf->bf_skbhead, skb);
2111 bar->i_fc[1] = IEEE80211_FC1_DIR_NODS;
2112 bar->i_fc[0] = IEEE80211_FC0_VERSION_0 |
2113 IEEE80211_FC0_TYPE_CTL |
2114 IEEE80211_FC0_SUBTYPE_BAR;
2115 bar->i_ctl = tid->tidno << IEEE80211_BAR_CTL_TID_S |
2116 IEEE80211_BAR_CTL_COMBA;
2117 bar->i_seq = adf_os_cpu_to_le16(tid->seq_start << IEEE80211_SEQ_SEQ_SHIFT);
2119 bf->bf_seqno = tid->seq_start << IEEE80211_SEQ_SEQ_SHIFT;
2121 adf_nbuf_put_tail(skb, sizeof(struct ieee80211_frame_bar));
2123 bf->bf_comp = ath_bar_tx_comp;
2124 bf->bf_tidno = tid->tidno;
2125 bf->bf_node = &tid->an->ni;
2126 ath_dma_map(sc, bf);
2127 adf_nbuf_dmamap_info(bf->bf_dmamap, &bf->bf_dmamap_info);
2130 ath_hal_setuptxdesc(sc->sc_ah, ds
2131 , adf_nbuf_len(skb) + IEEE80211_CRC_LEN
2133 , HAL_PKT_TYPE_NORMAL
2140 | HAL_TXDESC_CLRDMASK
2142 , ATH_COMP_PROC_NO_COMP_NO_CCS);
2144 skbhead = bf->bf_skbhead;
2148 for (ds0 = ds, i = 0; i < bf->bf_dmamap_info.nsegs; ds0++, i++) {
2149 ah->ah_clr11nAggr(ah, ds0);
2152 ath_filltxdesc(sc, bf);
2154 for (i = 0 ; i < 4; i++) {
2155 series[i].Tries = ATH_TXMAXTRY;
2156 series[i].Rate = min_rate;
2157 series[i].ChSel = sc->sc_ic.ic_tx_chainmask;
2160 ah->ah_set11nRateScenario(ah, bf->bf_desc, 0, 0, 0, series, 4, 4);
2161 ath_tgt_txq_add_ucast(sc, bf);