2 * Copyright (c) 2013 Qualcomm Atheros, Inc.
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted (subject to the limitations in the
7 * disclaimer below) provided that the following conditions are met:
9 * * Redistributions of source code must retain the above copyright
10 * notice, this list of conditions and the following disclaimer.
12 * * Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in the
14 * documentation and/or other materials provided with the
17 * * Neither the name of Qualcomm Atheros nor the names of its
18 * contributors may be used to endorse or promote products derived
19 * from this software without specific prior written permission.
21 * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE
22 * GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT
23 * HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
24 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
25 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
26 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
27 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
28 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
29 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
30 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
31 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
32 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
33 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
36 #include <adf_os_types.h>
37 #include <adf_os_dma.h>
38 #include <adf_os_timer.h>
39 #include <adf_os_lock.h>
40 #include <adf_os_io.h>
41 #include <adf_os_mem.h>
42 #include <adf_os_util.h>
43 #include <adf_os_stdtypes.h>
44 #include <adf_os_defer.h>
45 #include <adf_os_atomic.h>
48 #include <adf_net_wcmd.h>
52 #ifdef USE_HEADERLEN_RESV
56 #include <ieee80211_var.h>
57 #include "if_athrate.h"
58 #include "if_athvar.h"
60 #include "ah_internal.h"
62 #define ath_tgt_free_skb adf_nbuf_free
64 #define OFDM_PLCP_BITS 22
65 #define HT_RC_2_MCS(_rc) ((_rc) & 0x0f)
66 #define HT_RC_2_STREAMS(_rc) ((((_rc) & 0x78) >> 3) + 1)
72 #define HT_LTF(_ns) (4 * (_ns))
73 #define SYMBOL_TIME(_ns) ((_ns) << 2) // ns * 4 us
74 #define SYMBOL_TIME_HALFGI(_ns) (((_ns) * 18 + 4) / 5) // ns * 3.6 us
/*
 * Data bits carried per OFDM symbol for HT MCS 0-15, indexed as
 * bits_per_symbol[MCS][width] where width 0 = 20 MHz and 1 = 40 MHz
 * (selected by the CW40 flag at the call site in ath_pkt_duration()).
 * Rows 8-15 are the two-stream MCS values (double the single-stream row).
 */
76 static a_uint16_t bits_per_symbol[][2] = {
78 { 26, 54 }, // 0: BPSK
79 { 52, 108 }, // 1: QPSK 1/2
80 { 78, 162 }, // 2: QPSK 3/4
81 { 104, 216 }, // 3: 16-QAM 1/2
82 { 156, 324 }, // 4: 16-QAM 3/4
83 { 208, 432 }, // 5: 64-QAM 2/3
84 { 234, 486 }, // 6: 64-QAM 3/4
85 { 260, 540 }, // 7: 64-QAM 5/6
86 { 52, 108 }, // 8: BPSK
87 { 104, 216 }, // 9: QPSK 1/2
88 { 156, 324 }, // 10: QPSK 3/4
89 { 208, 432 }, // 11: 16-QAM 1/2
90 { 312, 648 }, // 12: 16-QAM 3/4
91 { 416, 864 }, // 13: 64-QAM 2/3
92 { 468, 972 }, // 14: 64-QAM 3/4
93 { 520, 1080 }, // 15: 64-QAM 5/6
96 void owltgt_tx_processq(struct ath_softc_tgt *sc, struct ath_txq *txq,
97 owl_txq_state_t txqstate);
98 static void ath_tgt_txqaddbuf(struct ath_softc_tgt *sc, struct ath_txq *txq,
99 struct ath_tx_buf *bf, struct ath_tx_desc *lastds);
100 void ath_rate_findrate_11n_Hardcoded(struct ath_softc_tgt *sc,
101 struct ath_rc_series series[]);
102 void ath_buf_set_rate_Hardcoded(struct ath_softc_tgt *sc,
103 struct ath_tx_buf *bf) ;
104 static a_int32_t ath_tgt_txbuf_setup(struct ath_softc_tgt *sc,
105 struct ath_tx_buf *bf, ath_data_hdr_t *dh);
106 static void ath_tx_freebuf(struct ath_softc_tgt *sc, struct ath_tx_buf *bf);
107 static void ath_tx_uc_comp(struct ath_softc_tgt *sc, struct ath_tx_buf *bf);
108 static void ath_update_stats(struct ath_softc_tgt *sc, struct ath_tx_buf *bf);
109 void adf_print_buf(adf_nbuf_t buf);
110 static void ath_tgt_tx_enqueue(struct ath_txq *txq, struct ath_atx_tid *tid);
112 void ath_tgt_tx_comp_aggr(struct ath_softc_tgt *sc, struct ath_tx_buf *bf);
113 struct ieee80211_frame *ATH_SKB_2_WH(adf_nbuf_t skb);
115 void ath_tgt_tx_send_normal(struct ath_softc_tgt *sc, struct ath_tx_buf *bf);
117 static void ath_tgt_tx_sched_normal(struct ath_softc_tgt *sc, ath_atx_tid_t *tid);
118 static void ath_tgt_tx_sched_aggr(struct ath_softc_tgt *sc, ath_atx_tid_t *tid);
120 extern a_int32_t ath_chainmask_sel_logic(void *);
121 static a_int32_t ath_get_pktlen(struct ath_tx_buf *bf, a_int32_t hdrlen);
122 static void ath_tgt_txq_schedule(struct ath_softc_tgt *sc, struct ath_txq *txq);
124 typedef void (*ath_ft_set_atype_t)(struct ath_softc_tgt *sc, struct ath_buf *bf);
127 ath_tx_set_retry(struct ath_softc_tgt *sc, struct ath_tx_buf *bf);
130 ath_bar_tx(struct ath_softc_tgt *sc, ath_atx_tid_t *tid, struct ath_tx_buf *bf);
132 ath_tx_update_baw(ath_atx_tid_t *tid, int seqno);
134 ath_tx_retry_subframe(struct ath_softc_tgt *sc, struct ath_tx_buf *bf,
135 ath_tx_bufhead *bf_q, struct ath_tx_buf **bar);
138 ath_tx_comp_aggr_error(struct ath_softc_tgt *sc, struct ath_tx_buf *bf, ath_atx_tid_t *tid);
140 void ath_tx_addto_baw(ath_atx_tid_t *tid, struct ath_tx_buf *bf);
141 static inline void ath_tx_retry_unaggr(struct ath_softc_tgt *sc, struct ath_tx_buf *bf);
142 static void ath_tx_comp_unaggr(struct ath_softc_tgt *sc, struct ath_tx_buf *bf);
143 static void ath_update_aggr_stats(struct ath_softc_tgt *sc, struct ath_tx_desc *ds,
144 int nframes, int nbad);
145 static inline void ath_aggr_resume_tid(struct ath_softc_tgt *sc, ath_atx_tid_t *tid);
146 static void ath_tx_comp_cleanup(struct ath_softc_tgt *sc, struct ath_tx_buf *bf);
148 int ath_tgt_tx_add_to_aggr(struct ath_softc_tgt *sc,
149 struct ath_buf *bf,int datatype,
150 ath_atx_tid_t *tid, int is_burst);
151 int ath_tgt_tx_form_aggr(struct ath_softc_tgt *sc, ath_atx_tid_t *tid,
152 ath_tx_bufhead *bf_q);
/*
 * Return a pointer to the 802.11 frame header at the start of an
 * skb's data area (no copy; the header is read in place).
 */
154 struct ieee80211_frame *ATH_SKB_2_WH(adf_nbuf_t skb)
159 adf_nbuf_peek_header(skb, &anbdata, &anblen);
161 return((struct ieee80211_frame *)anbdata);
/*
 * Replace the platform adf_os_cpu_to_le16 macro with a local
 * byte-swapping helper.  The swap is unconditional, so this is a
 * correct cpu-to-little-endian conversion only on a big-endian CPU.
 * NOTE(review): assumes the firmware target is big-endian -- confirm.
 */
164 #undef adf_os_cpu_to_le16
166 static a_uint16_t adf_os_cpu_to_le16(a_uint16_t x)
168 return ((((x) & 0xff00) >> 8) | (((x) & 0x00ff) << 8));
/*
 * Resume a paused TID: if it still has buffered frames, put the TID
 * back on its AC's hardware queue and kick the TX queue scheduler.
 * (An early-out on an empty buf_q is implied by the check below.)
 */
172 ath_aggr_resume_tid(struct ath_softc_tgt *sc, ath_atx_tid_t *tid)
176 txq = TID_TO_ACTXQ(tid->tidno);
179 if (asf_tailq_empty(&tid->buf_q))
182 ath_tgt_tx_enqueue(txq, tid);
183 ath_tgt_txq_schedule(sc, txq);
/* Pause aggregation on a TID (body not visible in this chunk). */
187 ath_aggr_pause_tid(struct ath_softc_tgt *sc, ath_atx_tid_t *tid)
/*
 * Compute the on-air duration of a frame sent at rate index 'rix'.
 * Non-HT rates are delegated to ath_hal_computetxtime(); HT rates are
 * computed from the OFDM symbol count (bits_per_symbol[] lookup, where
 * 'width' selects the 20/40 MHz column and 'half_gi' selects the short
 * guard interval symbol time) plus the HT preamble fields.
 * For an aggregate, bf_al (aggregate length) is used instead of bf_pktlen.
 */
192 static a_uint32_t ath_pkt_duration(struct ath_softc_tgt *sc,
193 a_uint8_t rix, struct ath_tx_buf *bf,
194 a_int32_t width, a_int32_t half_gi)
196 const HAL_RATE_TABLE *rt = sc->sc_currates;
197 a_uint32_t nbits, nsymbits, duration, nsymbols;
202 pktlen = bf->bf_isaggr ? bf->bf_al : bf->bf_pktlen;
203 rc = rt->info[rix].rateCode;
/* Non-HT path: let the HAL compute the legacy tx time. */
206 return ath_hal_computetxtime(sc->sc_ah, rt, pktlen, rix,
/* HT path: payload bits + PLCP service/tail bits, rounded up to symbols. */
209 nbits = (pktlen << 3) + OFDM_PLCP_BITS;
210 nsymbits = bits_per_symbol[HT_RC_2_MCS(rc)][width];
211 nsymbols = (nbits + nsymbits - 1) / nsymbits;
214 duration = SYMBOL_TIME(nsymbols);
216 duration = SYMBOL_TIME_HALFGI(nsymbols);
/* Add legacy + HT mixed-mode preamble (one HT-LTF per spatial stream). */
218 streams = HT_RC_2_STREAMS(rc);
219 duration += L_STF + L_LTF + L_SIG + HT_SIG + HT_STF + HT_LTF(streams);
/*
 * DMA-map the buffer's payload for transmit.  Note the local 'skb' is
 * immediately overwritten with the head of bf_skbhead, so the mapping
 * always targets the first queued skb, not bf->bf_skb.
 */
224 static void ath_dma_map(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
226 adf_nbuf_t skb = bf->bf_skb;
228 skb = adf_nbuf_queue_first(&bf->bf_skbhead);
229 adf_nbuf_map(sc->sc_dev, bf->bf_dmamap, skb, ADF_OS_DMA_TO_DEVICE);
/*
 * Undo ath_dma_map().  The adf_nbuf_queue_first() call's result is
 * discarded; only the dmamap itself is unmapped.
 */
232 static void ath_dma_unmap(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
234 adf_nbuf_queue_first(&bf->bf_skbhead);
235 adf_nbuf_unmap( sc->sc_dev, bf->bf_dmamap, ADF_OS_DMA_TO_DEVICE);
/*
 * Populate one TX descriptor per DMA segment of the buffer: each
 * descriptor gets its segment's physical address, and all but the last
 * are chained via ds_link to the next descriptor in the buffer's array.
 * The per-descriptor HAL fill call (elided lines) receives the segment
 * length and first/last segment markers.
 */
238 static void ath_filltxdesc(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
240 struct ath_tx_desc *ds0, *ds = bf->bf_desc;
241 struct ath_hal *ah = sc->sc_ah;
245 adf_nbuf_dmamap_info(bf->bf_dmamap, &bf->bf_dmamap_info);
247 for (i = 0; i < bf->bf_dmamap_info.nsegs; i++, ds++) {
249 ds->ds_data = bf->bf_dmamap_info.dma_segs[i].paddr;
251 if (i == (bf->bf_dmamap_info.nsegs - 1)) {
255 ds->ds_link = ATH_BUF_GET_DESC_PHY_ADDR_WITH_IDX(bf, i+1);
258 , bf->bf_dmamap_info.dma_segs[i].len
260 , i == (bf->bf_dmamap_info.nsegs - 1)
/*
 * Build the TX descriptor chain for a buffer: translate the 802.11
 * protection mode into HAL RTS/CTS descriptor flags, program the 11n
 * descriptor (with INTREQ so completion raises an interrupt), then
 * fill the per-segment descriptors.
 * NOTE(review): the case bodies' break statements fall on elided lines;
 * RTSENA and CTSENA are presumed mutually exclusive here -- confirm.
 */
265 static void ath_tx_tgt_setds(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
267 struct ath_tx_desc *ds = bf->bf_desc;
268 struct ath_hal *ah = sc->sc_ah;
270 switch (bf->bf_protmode) {
271 case IEEE80211_PROT_RTSCTS:
272 bf->bf_flags |= HAL_TXDESC_RTSENA;
274 case IEEE80211_PROT_CTSONLY:
275 bf->bf_flags |= HAL_TXDESC_CTSENA;
281 ah->ah_set11nTxDesc(ds
287 , bf->bf_flags | HAL_TXDESC_INTREQ);
289 ath_filltxdesc(sc, bf);
/*
 * Swap the payload and per-frame state from 'bf' into the spare held
 * buffer (sc_txbuf_held) and make 'bf' the new held spare.  The skb is
 * moved (not copied) between the two skb queues, the mapping is redone
 * on the new owner, and its descriptors are rebuilt.  Returns the buffer
 * that now carries the frame (per the visible assignments; the return
 * statement itself is on an elided line).
 */
292 static struct ath_tx_buf *ath_buf_toggle(struct ath_softc_tgt *sc,
293 struct ath_tx_buf *bf,
296 struct ath_tx_buf *tmp = NULL;
297 adf_nbuf_t buf = NULL;
299 adf_os_assert(sc->sc_txbuf_held != NULL);
301 tmp = sc->sc_txbuf_held;
/* Release bf's DMA mapping before moving its skb to tmp. */
304 ath_dma_unmap(sc, bf);
305 adf_nbuf_queue_init(&tmp->bf_skbhead);
306 buf = adf_nbuf_queue_remove(&bf->bf_skbhead);
308 adf_nbuf_queue_add(&tmp->bf_skbhead, buf);
310 adf_os_assert(adf_nbuf_queue_len(&bf->bf_skbhead) == 0);
/* Copy all per-frame bookkeeping so tmp is a drop-in replacement. */
312 tmp->bf_next = bf->bf_next;
313 tmp->bf_endpt = bf->bf_endpt;
314 tmp->bf_tidno = bf->bf_tidno;
315 tmp->bf_skb = bf->bf_skb;
316 tmp->bf_node = bf->bf_node;
317 tmp->bf_isaggr = bf->bf_isaggr;
318 tmp->bf_flags = bf->bf_flags;
319 tmp->bf_state = bf->bf_state;
320 tmp->bf_retries = bf->bf_retries;
321 tmp->bf_comp = bf->bf_comp;
322 tmp->bf_nframes = bf->bf_nframes;
323 tmp->bf_cookie = bf->bf_cookie;
335 ath_dma_map(sc, tmp);
336 ath_tx_tgt_setds(sc, tmp);
/* bf becomes the new spare. */
339 sc->sc_txbuf_held = bf;
/*
 * Return every skb on 'head' to the HTC layer for the given endpoint,
 * draining the queue completely.
 */
344 static void ath_tgt_skb_free(struct ath_softc_tgt *sc,
345 adf_nbuf_queue_t *head,
346 HTC_ENDPOINT_ID endpt)
350 while (adf_nbuf_queue_len(head) != 0) {
351 tskb = adf_nbuf_queue_remove(head);
352 ath_free_tx_skb(sc->tgt_htc_handle,endpt,tskb);
/*
 * Completion path for a buffer: unmap it, hand its skbs back to HTC,
 * swap it with the held spare, and return the resulting buffer to the
 * free list.
 */
356 static void ath_buf_comp(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
358 ath_dma_unmap(sc, bf);
359 ath_tgt_skb_free(sc, &bf->bf_skbhead,bf->bf_endpt);
362 bf = ath_buf_toggle(sc, bf, 0);
364 asf_tailq_insert_tail(&sc->sc_txbuf, bf, bf_list);
/*
 * Program the 4-entry HAL rate series for a buffer from its rate-control
 * results (bf_rcs[]): rate code (+short preamble), try counts, and
 * RTS/CTS / 40MHz / half-GI / STBC flags, plus the per-series on-air
 * duration and chain selection.  Finally derives the RTS/CTS rate from
 * the protection rate index and hands everything to the HAL.
 */
368 static void ath_buf_set_rate(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
370 struct ath_hal *ah = sc->sc_ah;
371 const HAL_RATE_TABLE *rt;
372 struct ath_tx_desc *ds = bf->bf_desc;
373 HAL_11N_RATE_SERIES series[4];
375 a_uint8_t rix, cix, rtsctsrate;
376 a_int32_t prot_mode = AH_FALSE;
378 rt = sc->sc_currates;
379 rix = bf->bf_rcs[0].rix;
380 flags = (bf->bf_flags & (HAL_TXDESC_RTSENA | HAL_TXDESC_CTSENA));
381 cix = rt->info[sc->sc_protrix].controlRate;
/* Enable protection for OFDM/HT unicast frames that expect an ACK. */
383 if (bf->bf_protmode != IEEE80211_PROT_NONE &&
384 (rt->info[rix].phy == IEEE80211_T_OFDM ||
385 rt->info[rix].phy == IEEE80211_T_HT) &&
386 (bf->bf_flags & HAL_TXDESC_NOACK) == 0) {
387 cix = rt->info[sc->sc_protrix].controlRate;
390 if (ath_hal_getcapability(ah, HAL_CAP_HT) && (!bf->bf_ismcast))
391 flags = HAL_TXDESC_RTSENA;
394 if (bf->bf_rcs[i].tries) {
395 cix = rt->info[bf->bf_rcs[i].rix].controlRate;
402 adf_os_mem_set(series, 0, sizeof(HAL_11N_RATE_SERIES) * 4);
404 for (i = 0; i < 4; i++) {
/* Skip series entries rate control left unused. */
405 if (!bf->bf_rcs[i].tries)
408 rix = bf->bf_rcs[i].rix;
410 series[i].Rate = rt->info[rix].rateCode |
411 (bf->bf_shpream ? rt->info[rix].shortPreamble : 0);
413 series[i].Tries = bf->bf_rcs[i].tries;
/* Variant with STBC (elided conditional selects between the two). */
415 series[i].RateFlags = ((bf->bf_rcs[i].flags & ATH_RC_RTSCTS_FLAG) ?
416 HAL_RATESERIES_RTS_CTS : 0 ) |
417 ((bf->bf_rcs[i].flags & ATH_RC_CW40_FLAG) ?
418 HAL_RATESERIES_2040 : 0 ) |
419 ((bf->bf_rcs[i].flags & ATH_RC_HT40_SGI_FLAG) ?
420 HAL_RATESERIES_HALFGI : 0 ) |
421 ((bf->bf_rcs[i].flags & ATH_RC_TX_STBC_FLAG) ?
422 HAL_RATESERIES_STBC: 0);
424 series[i].RateFlags = ((bf->bf_rcs[i].flags & ATH_RC_RTSCTS_FLAG) ?
425 HAL_RATESERIES_RTS_CTS : 0 ) |
426 ((bf->bf_rcs[i].flags & ATH_RC_CW40_FLAG) ?
427 HAL_RATESERIES_2040 : 0 ) |
428 ((bf->bf_rcs[i].flags & ATH_RC_HT40_SGI_FLAG) ?
429 HAL_RATESERIES_HALFGI : 0 );
431 series[i].PktDuration = ath_pkt_duration(sc, rix, bf,
432 (bf->bf_rcs[i].flags & ATH_RC_CW40_FLAG) != 0,
433 (bf->bf_rcs[i].flags & ATH_RC_HT40_SGI_FLAG));
435 series[i].ChSel = sc->sc_ic.ic_tx_chainmask;
438 series[i].RateFlags |= HAL_RATESERIES_RTS_CTS;
/* NOTE(review): DS (dual-stream) flag maps onto RTS_CTS here -- looks
 * suspicious; confirm against the HAL rate-series flag definitions. */
440 if (bf->bf_rcs[i].flags & ATH_RC_DS_FLAG)
441 series[i].RateFlags |= HAL_RATESERIES_RTS_CTS;
444 rtsctsrate = rt->info[cix].rateCode |
445 (bf->bf_shpream ? rt->info[cix].shortPreamble : 0);
447 ah->ah_set11nRateScenario(ds, 1,
/*
 * Thin wrapper around the rate-control module's ath_rate_findrate()
 * with fixed policy arguments (probe allowed; see call below for the
 * hard-coded try/series parameters).
 */
453 static void ath_tgt_rate_findrate(struct ath_softc_tgt *sc,
454 struct ath_node_target *an,
455 a_int32_t shortPreamble,
461 struct ath_rc_series series[],
464 ath_rate_findrate(sc, an, 1, frameLen, 10, 4, 1,
465 ATH_RC_PROBE_ALLOWED, series, isProbe);
/*
 * Reset a TID to its pristine state: zeroed sequence numbers, default
 * block-ack window size, empty BAW, empty buffer queue, unscheduled,
 * and a cleared tx-buffer bitmap.
 */
468 static void owl_tgt_tid_init(struct ath_atx_tid *tid)
472 tid->seq_start = tid->seq_next = 0;
473 tid->baw_size = WME_MAX_BA;
474 tid->baw_head = tid->baw_tail = 0;
477 tid->sched = AH_FALSE;
479 asf_tailq_init(&tid->buf_q);
481 for (i = 0; i < ATH_TID_MAX_BUFS; i++) {
482 TX_BUF_BITMAP_CLR(tid->tx_buf_bitmap, i);
/*
 * Finish an in-progress TID cleanup: clear the cleanup flag, and if a
 * re-initialization was requested while cleanup ran, re-init the TID
 * before resuming aggregation on it.
 */
486 static void owl_tgt_tid_cleanup(struct ath_softc_tgt *sc,
487 struct ath_atx_tid *tid)
494 tid->flag &= ~TID_CLEANUP_INPROGRES;
496 if (tid->flag & TID_REINITIALIZE) {
497 adf_os_print("TID REINIT DONE for tid %p\n", tid);
498 tid->flag &= ~TID_REINITIALIZE;
499 owl_tgt_tid_init(tid);
501 ath_aggr_resume_tid(sc, tid);
/*
 * Initialize all WME TIDs of a node.  A TID whose cleanup is still in
 * progress is only marked TID_REINITIALIZE (owl_tgt_tid_cleanup() will
 * re-init it later); otherwise it is re-initialized immediately.
 */
505 void owl_tgt_node_init(struct ath_node_target * an)
507 struct ath_atx_tid *tid;
510 for (tidno = 0, tid = &an->tid[tidno]; tidno < WME_NUM_TID;tidno++, tid++) {
514 if ( tid->flag & TID_CLEANUP_INPROGRES ) {
515 tid->flag |= TID_REINITIALIZE;
516 adf_os_print("tid[%p]->incomp is not 0: %d\n",
519 owl_tgt_tid_init(tid);
/* Reset both ping-pong WMI tx-status event buffers to empty. */
524 void ath_tx_status_clear(struct ath_softc_tgt *sc)
528 for (i = 0; i < 2; i++) {
529 sc->tx_status[i].cnt = 0;
/*
 * Return the first of the two tx-status event buffers that still has
 * room for another entry, or NULL if both are full.
 */
533 static WMI_TXSTATUS_EVENT *ath_tx_status_get(struct ath_softc_tgt *sc)
535 WMI_TXSTATUS_EVENT *txs = NULL;
538 for (i = 0; i < 2; i++) {
539 if (sc->tx_status[i].cnt < HTC_MAX_TX_STATUS) {
540 txs = &sc->tx_status[i];
/*
 * Append one completion record for 'bf' to the pending WMI tx-status
 * event: cookie, endpoint id (packed into ts_rate), a FILT flag when
 * the frame was filtered, and an ACK flag when none of the error bits
 * (excessive retry / FIFO / timer expired / filtered) are set.
 * Skipped entirely while the TX path is draining.
 */
548 void ath_tx_status_update(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
550 struct ath_tx_desc *ds = bf->bf_lastds;
551 WMI_TXSTATUS_EVENT *txs;
553 if (sc->sc_tx_draining)
556 txs = ath_tx_status_get(sc);
560 txs->txstatus[txs->cnt].ts_flags = 0;
562 txs->txstatus[txs->cnt].cookie = bf->bf_cookie;
563 txs->txstatus[txs->cnt].ts_rate = SM(bf->bf_endpt, ATH9K_HTC_TXSTAT_EPID);
565 if (ds->ds_txstat.ts_status & HAL_TXERR_FILT)
566 txs->txstatus[txs->cnt].ts_flags |= ATH9K_HTC_TXSTAT_FILT;
568 if (!(ds->ds_txstat.ts_status & HAL_TXERR_XRETRY) &&
569 !(ds->ds_txstat.ts_status & HAL_TXERR_FIFO) &&
570 !(ds->ds_txstat.ts_status & HAL_TXERR_TIMER_EXPIRED) &&
571 !(ds->ds_txstat.ts_status & HAL_TXERR_FILT))
572 txs->txstatus[txs->cnt].ts_flags |= ATH9K_HTC_TXSTAT_ACK;
574 ath_tx_status_update_rate(sc, bf->bf_rcs, ds->ds_txstat.ts_rate, txs);
/*
 * Aggregate variant of ath_tx_status_update(): records the completion
 * of one subframe using the supplied descriptor and rate-series array
 * (ACK flag setting is on an elided conditional line).
 */
579 void ath_tx_status_update_aggr(struct ath_softc_tgt *sc, struct ath_tx_buf *bf,
580 struct ath_tx_desc *ds, struct ath_rc_series rcs[],
583 WMI_TXSTATUS_EVENT *txs;
585 if (sc->sc_tx_draining)
588 txs = ath_tx_status_get(sc);
592 txs->txstatus[txs->cnt].cookie = bf->bf_cookie;
593 txs->txstatus[txs->cnt].ts_rate = SM(bf->bf_endpt, ATH9K_HTC_TXSTAT_EPID);
596 txs->txstatus[txs->cnt].ts_flags |= ATH9K_HTC_TXSTAT_ACK;
599 ath_tx_status_update_rate(sc, rcs, ds->ds_txstat.ts_rate, txs);
/*
 * Flush any accumulated tx-status entries to the host as WMI events
 * and reset the buffers.  No-op while the TX path is draining.
 */
604 void ath_tx_status_send(struct ath_softc_tgt *sc)
608 if (sc->sc_tx_draining)
611 for (i = 0; i < 2; i++) {
612 if (sc->tx_status[i].cnt) {
613 wmi_event(sc->tgt_wmi_handle, WMI_TXSTATUS_EVENTID,
614 &sc->tx_status[i], sizeof(WMI_TXSTATUS_EVENT));
615 /* FIXME: Handle failures. */
616 sc->tx_status[i].cnt = 0;
/*
 * Process the CAB (content-after-beacon) queue with the software
 * beacon-alert interrupt masked, so beacon processing cannot race the
 * CABQ drain; the interrupt mask is restored afterwards.
 */
621 static void owltgt_tx_process_cabq(struct ath_softc_tgt *sc, struct ath_txq *txq)
623 struct ath_hal *ah = sc->sc_ah;
624 ah->ah_setInterrupts(ah, sc->sc_imask & ~HAL_INT_SWBA);
625 owltgt_tx_processq(sc, txq, OWL_TXQ_ACTIVE);
626 ah->ah_setInterrupts(ah, sc->sc_imask);
/*
 * TX completion tasklet: clear the status buffers, walk every
 * configured data queue (the last 6 hardware queues are excluded),
 * process completions (CABQ via its interrupt-masked wrapper), then
 * push the collected statuses to the host.
 */
629 void owl_tgt_tx_tasklet(TQUEUE_ARG data)
631 struct ath_softc_tgt *sc = (struct ath_softc_tgt *)data;
635 ath_tx_status_clear(sc);
637 for (i = 0; i < (HAL_NUM_TX_QUEUES - 6); i++) {
638 txq = ATH_TXQ(sc, i);
640 if (ATH_TXQ_SETUP(sc, i)) {
641 if (txq == sc->sc_cabq)
642 owltgt_tx_process_cabq(sc, txq);
644 owltgt_tx_processq(sc, txq, OWL_TXQ_ACTIVE);
648 ath_tx_status_send(sc);
/*
 * Reap completed buffers from a hardware TX queue.  For each head
 * buffer: ask the HAL for its completion status; if still in progress
 * on an active queue, stop; on a stopped queue, fake a "software
 * filtered" completion instead.  Completed buffers are removed,
 * reported via ath_tx_status_update(), and recycled via ath_buf_comp().
 * When the queue empties its link pointers are cleared, and on an
 * active queue the TID scheduler is kicked afterwards.
 */
651 void owltgt_tx_processq(struct ath_softc_tgt *sc, struct ath_txq *txq,
652 owl_txq_state_t txqstate)
654 struct ath_tx_buf *bf;
655 struct ath_tx_desc *ds;
656 struct ath_hal *ah = sc->sc_ah;
660 if (asf_tailq_empty(&txq->axq_q)) {
661 txq->axq_link = NULL;
662 txq->axq_linkbuf = NULL;
666 bf = asf_tailq_first(&txq->axq_q);
669 status = ah->ah_procTxDesc(ah, ds);
671 if (status == HAL_EINPROGRESS) {
672 if (txqstate == OWL_TXQ_ACTIVE)
674 else if (txqstate == OWL_TXQ_STOPPED) {
675 __stats(sc, tx_stopfiltered);
676 ds->ds_txstat.ts_flags = 0;
677 ds->ds_txstat.ts_status = HAL_OK;
679 ds->ds_txstat.ts_flags = HAL_TX_SW_FILTERED;
683 ATH_TXQ_REMOVE_HEAD(txq, bf, bf_list);
684 if ((asf_tailq_empty(&txq->axq_q))) {
685 __stats(sc, tx_qnull);
686 txq->axq_link = NULL;
687 txq->axq_linkbuf = NULL;
693 ath_tx_status_update(sc, bf);
694 ath_buf_comp(sc, bf);
697 if (txqstate == OWL_TXQ_ACTIVE) {
698 ath_tgt_txq_schedule(sc, txq);
/*
 * Static duplicate of ATH_SKB_2_WH(): return the in-place 802.11
 * header of an skb.  NOTE(review): redundant with the public
 * ATH_SKB_2_WH above -- candidate for consolidation.
 */
703 static struct ieee80211_frame* ATH_SKB2_WH(adf_nbuf_t skb)
708 adf_nbuf_peek_header(skb, &anbdata, &anblen);
709 return((struct ieee80211_frame *)anbdata);
713 ath_tgt_tid_drain(struct ath_softc_tgt *sc, struct ath_atx_tid *tid)
715 struct ath_tx_buf *bf;
717 while (!asf_tailq_empty(&tid->buf_q)) {
718 TAILQ_DEQ(&tid->buf_q, bf, bf_list);
719 ath_tx_freebuf(sc, bf);
722 tid->seq_next = tid->seq_start;
723 tid->baw_tail = tid->baw_head;
/*
 * Completion handler for non-aggregated frames: if the frame's TID is
 * mid-cleanup, advance that cleanup; otherwise report unicast
 * completion, then free the buffer.
 */
726 static void ath_tgt_tx_comp_normal(struct ath_softc_tgt *sc,
727 struct ath_tx_buf *bf)
729 struct ath_node_target *an = ATH_NODE_TARGET(bf->bf_node);
730 ath_atx_tid_t *tid = ATH_AN_2_TID(an, bf->bf_tidno);
732 if (tid->flag & TID_CLEANUP_INPROGRES) {
733 owl_tgt_tid_cleanup(sc, tid);
737 ath_tx_uc_comp(sc, bf);
740 ath_tx_freebuf(sc, bf);
/*
 * Look up the target-side node object for a host-supplied node index,
 * rejecting out-of-range indices and nodes with no attached VAP.
 * NOTE(review): the bounds check uses '>' -- if TARGET_NODE_MAX is the
 * size of sc_sta[] (not the last valid index) this is off by one;
 * confirm against the array declaration.
 */
743 static struct ieee80211_node_target * ath_tgt_find_node(struct ath_softc_tgt *sc,
744 a_int32_t node_index)
746 struct ath_node_target *an;
747 struct ieee80211_node_target *ni;
749 if (node_index > TARGET_NODE_MAX)
752 an = &sc->sc_sta[node_index];
756 if (ni->ni_vap == NULL) {
/*
 * Pop a buffer off the free list, zeroing its per-frame state first.
 * Per the visible code the state reset happens before the removal;
 * NULL handling for an empty free list falls on elided lines.
 */
765 static struct ath_tx_buf* ath_tx_buf_alloc(struct ath_softc_tgt *sc)
767 struct ath_tx_buf *bf = NULL;
769 bf = asf_tailq_first(&sc->sc_txbuf);
771 adf_os_mem_set(&bf->bf_state, 0, sizeof(struct ath_buf_state));
772 asf_tailq_remove(&sc->sc_txbuf, bf, bf_list);
/*
 * Turn a host data frame (skb + ath data header) into a ready-to-queue
 * tx buffer: resolve the destination node and TID (dropping frames for
 * TIDs pending re-init), allocate a buffer, copy per-frame parameters
 * from the header (TID, key, protection mode), attach the skb, fill in
 * the buffer fields, and build its descriptors.
 */
780 struct ath_tx_buf* ath_tgt_tx_prepare(struct ath_softc_tgt *sc,
781 adf_nbuf_t skb, ath_data_hdr_t *dh)
783 struct ath_tx_buf *bf;
784 struct ieee80211_node_target *ni;
785 struct ath_atx_tid *tid;
787 ni = ath_tgt_find_node(sc, dh->ni_index);
791 tid = ATH_AN_2_TID(ATH_NODE_TARGET(ni), dh->tidno);
792 if (tid->flag & TID_REINITIALIZE) {
793 adf_os_print("drop frame due to TID reinit\n");
797 bf = ath_tx_buf_alloc(sc);
799 __stats(sc, tx_nobufs);
803 bf->bf_tidno = dh->tidno;
804 bf->bf_txq = TID_TO_ACTXQ(bf->bf_tidno);
805 bf->bf_keytype = dh->keytype;
806 bf->bf_keyix = dh->keyix;
807 bf->bf_protmode = dh->flags & (IEEE80211_PROT_RTSCTS | IEEE80211_PROT_CTSONLY);
810 adf_nbuf_queue_add(&bf->bf_skbhead, skb);
811 skb = adf_nbuf_queue_first(&(bf->bf_skbhead));
813 if (adf_nbuf_queue_len(&(bf->bf_skbhead)) == 0) {
814 __stats(sc, tx_noskbs);
822 ath_tgt_txbuf_setup(sc, bf, dh);
824 ath_tx_tgt_setds(sc, bf);
/*
 * Stamp the next per-TID sequence number into the frame's 802.11
 * header, preserving the existing fragment number bits, and advance
 * the TID counter unless more fragments follow.
 */
829 static void ath_tgt_tx_seqno_normal(struct ath_tx_buf *bf)
831 struct ieee80211_node_target *ni = bf->bf_node;
832 struct ath_node_target *an = ATH_NODE_TARGET(ni);
833 struct ieee80211_frame *wh = ATH_SKB_2_WH(bf->bf_skb);
834 struct ath_atx_tid *tid = ATH_AN_2_TID(an, bf->bf_tidno);
/* Save the fragment number before the whole seq field is rewritten. */
836 u_int8_t fragno = (wh->i_seq[0] & 0xf);
838 INCR(ni->ni_txseqmgmt, IEEE80211_SEQ_MAX);
840 bf->bf_seqno = (tid->seq_next << IEEE80211_SEQ_SEQ_SHIFT);
842 *(u_int16_t *)wh->i_seq = adf_os_cpu_to_le16(bf->bf_seqno);
843 wh->i_seq[0] |= fragno;
845 if (!(wh->i_fc[1] & IEEE80211_FC1_MORE_FRAG))
846 INCR(tid->seq_next, IEEE80211_SEQ_MAX);
/*
 * Prepare crypto state for a frame: unprotected frames get a cleared
 * key type / invalid key index; protected frames grow bf_pktlen by the
 * cipher's trailer size and, for AES/TKIP, have the crypto headers
 * added via ieee80211_tgt_crypto_encap().
 * NOTE(review): break statements between the switch cases fall on
 * elided lines; whether AES also adds ICVLEN cannot be seen here.
 */
849 static a_int32_t ath_key_setup(struct ieee80211_node_target *ni,
850 struct ath_tx_buf *bf)
852 struct ieee80211_frame *wh = ATH_SKB_2_WH(bf->bf_skb);
854 if (!(wh->i_fc[1] & IEEE80211_FC1_WEP)) {
855 bf->bf_keytype = HAL_KEY_TYPE_CLEAR;
856 bf->bf_keyix = HAL_TXKEYIX_INVALID;
860 switch (bf->bf_keytype) {
861 case HAL_KEY_TYPE_WEP:
862 bf->bf_pktlen += IEEE80211_WEP_ICVLEN;
864 case HAL_KEY_TYPE_AES:
865 bf->bf_pktlen += IEEE80211_WEP_MICLEN;
867 case HAL_KEY_TYPE_TKIP:
868 bf->bf_pktlen += IEEE80211_WEP_ICVLEN;
874 if (bf->bf_keytype == HAL_KEY_TYPE_AES ||
875 bf->bf_keytype == HAL_KEY_TYPE_TKIP)
876 ieee80211_tgt_crypto_encap(wh, ni, bf->bf_keytype);
/*
 * Queue a unicast buffer on its hardware queue and start DMA.  If the
 * queue was empty the descriptor pointer is written directly; otherwise
 * the previous tail's link is patched, and if the TXE bit for this
 * queue has already cleared (DMA stopped before seeing the new link)
 * the descriptor pointer is rewritten to avoid a stalled queue.
 * NOTE(review): 0x0840 is the raw MAC TXE register offset -- consider a
 * named constant; confirm against the chip register map.
 */
881 static void ath_tgt_txq_add_ucast(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
883 struct ath_hal *ah = sc->sc_ah;
885 volatile a_int32_t txe_val;
891 ah->ah_procTxDesc(ah, bf->bf_lastds);
893 ATH_TXQ_INSERT_TAIL(txq, bf, bf_list);
895 if (txq->axq_link == NULL) {
896 ah->ah_setTxDP(ah, txq->axq_qnum, ATH_BUF_GET_DESC_PHY_ADDR(bf));
898 *txq->axq_link = ATH_BUF_GET_DESC_PHY_ADDR(bf);
900 txe_val = ioread32_mac(0x0840);
901 if (!(txe_val & (1<< txq->axq_qnum)))
902 ah->ah_setTxDP(ah, txq->axq_qnum, ATH_BUF_GET_DESC_PHY_ADDR(bf));
905 txq->axq_link = &bf->bf_lastds->ds_link;
906 ah->ah_startTxDma(ah, txq->axq_qnum);
/*
 * Fill the remaining per-frame fields of a data buffer: assign the
 * sequence number, set the unicast queue-add hook, compute header and
 * packet lengths, classify multicast, run key setup (propagating its
 * failure), and derive the preamble/descriptor flags from the host
 * header flags.
 */
909 static a_int32_t ath_tgt_txbuf_setup(struct ath_softc_tgt *sc,
910 struct ath_tx_buf *bf,
914 struct ieee80211_frame *wh = ATH_SKB2_WH(bf->bf_skb);
916 a_uint32_t flags = adf_os_ntohl(dh->flags);
918 ath_tgt_tx_seqno_normal(bf);
920 bf->bf_txq_add = ath_tgt_txq_add_ucast;
921 bf->bf_hdrlen = ieee80211_anyhdrsize(wh);
922 bf->bf_pktlen = ath_get_pktlen(bf, bf->bf_hdrlen);
923 bf->bf_ismcast = IEEE80211_IS_MULTICAST(wh->i_addr1);
925 if ((retval = ath_key_setup(bf->bf_node, bf)) < 0)
928 if (flags & ATH_SHORT_PREAMBLE)
929 bf->bf_shpream = AH_TRUE;
931 bf->bf_shpream = AH_FALSE;
933 bf->bf_flags = HAL_TXDESC_CLRDMASK;
934 bf->bf_atype = HAL_PKT_TYPE_NORMAL;
/*
 * Compute the over-the-air frame length from the first queued skb:
 * subtract the header's 4-byte alignment padding (hdrlen & 3) and add
 * the FCS length.
 */
940 ath_get_pktlen(struct ath_tx_buf *bf, a_int32_t hdrlen)
942 adf_nbuf_t skb = bf->bf_skb;
945 skb = adf_nbuf_queue_first(&bf->bf_skbhead);
946 pktlen = adf_nbuf_len(skb);
948 pktlen -= (hdrlen & 3);
949 pktlen += IEEE80211_CRC_LEN;
/*
 * Send a frame without aggregation.  Unicast frames get their rate
 * series from the rate-control module; multicast frames use a single
 * series at the VAP's minimum rate with the remaining series zeroed.
 * The chosen series is programmed into the descriptor and the buffer
 * is handed to its queue-add hook.
 */
955 ath_tgt_tx_send_normal(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
957 struct ath_node_target *an = ATH_NODE_TARGET(bf->bf_node);
958 struct ath_rc_series rcs[4];
959 struct ath_rc_series mrcs[4];
960 a_int32_t shortPreamble = 0;
961 a_int32_t isProbe = 0;
963 adf_os_mem_set(rcs, 0, sizeof(struct ath_rc_series)*4 );
964 adf_os_mem_set(mrcs, 0, sizeof(struct ath_rc_series)*4 );
966 if (!bf->bf_ismcast) {
967 ath_tgt_rate_findrate(sc, an, shortPreamble,
970 ath_hal_memcpy(bf->bf_rcs, rcs, sizeof(rcs));
972 struct ath_vap_target *avp;
974 avp = &sc->sc_vap[bf->vap_index];
/* Multicast: single series at the VAP's minimum rate, no retries. */
976 mrcs[1].tries = mrcs[2].tries = mrcs[3].tries = 0;
977 mrcs[1].rix = mrcs[2].rix = mrcs[3].rix = 0;
978 mrcs[0].rix = ath_get_minrateidx(sc, avp);
981 ath_hal_memcpy(bf->bf_rcs, mrcs, sizeof(mrcs));
984 ath_buf_set_rate(sc, bf);
985 bf->bf_txq_add(sc, bf);
/*
 * Release a buffer: strip aggregation state (aggr bit, burst duration,
 * virtual-more-frag) from every descriptor it used, unmap it, return
 * its skbs to HTC, swap with the held spare, clear the retry flag, and
 * put the buffer back on the free list.
 */
989 ath_tx_freebuf(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
992 struct ath_tx_desc *bfd = NULL;
993 struct ath_hal *ah = sc->sc_ah;
995 for (bfd = bf->bf_desc, i = 0; i < bf->bf_dmamap_info.nsegs; bfd++, i++) {
996 ah->ah_clr11nAggr(bfd);
997 ah->ah_set11nBurstDuration(bfd, 0);
998 ah->ah_set11nVirtualMoreFrag(bfd, 0);
1001 ath_dma_unmap(sc, bf);
1003 ath_tgt_skb_free(sc, &bf->bf_skbhead,bf->bf_endpt);
1009 bf = ath_buf_toggle(sc, bf, 0);
1011 bf->bf_isretried = 0;
1014 asf_tailq_insert_tail(&sc->sc_txbuf, bf, bf_list);
/*
 * Unicast completion bookkeeping: report status to the host, update
 * driver statistics, and feed the result back to rate control.
 */
1018 ath_tx_uc_comp(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
1020 ath_tx_status_update(sc, bf);
1021 ath_update_stats(sc, bf);
1022 ath_rate_tx_complete(sc, ATH_NODE_TARGET(bf->bf_node),
1023 bf->bf_lastds, bf->bf_rcs, 1, 0);
1027 ath_update_stats(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
1029 struct ath_tx_desc *ds = bf->bf_desc;
1030 struct ieee80211_frame *wh = ATH_SKB2_WH(bf->bf_skb);
1033 if (ds->ds_txstat.ts_status == 0) {
1034 if (ds->ds_txstat.ts_rate & HAL_TXSTAT_ALTRATE)
1035 sc->sc_tx_stats.ast_tx_altrate++;
1037 if (ds->ds_txstat.ts_status & HAL_TXERR_XRETRY &&
1038 !IEEE80211_IS_MULTICAST(wh->i_addr1))
1039 sc->sc_tx_stats.ast_tx_xretries++;
1040 if (ds->ds_txstat.ts_status & HAL_TXERR_FIFO)
1041 sc->sc_tx_stats.ast_tx_fifoerr++;
1042 if (ds->ds_txstat.ts_status & HAL_TXERR_FILT)
1043 sc->sc_tx_stats.ast_tx_filtered++;
1044 if (ds->ds_txstat.ts_status & HAL_TXERR_TIMER_EXPIRED)
1045 sc->sc_tx_stats.ast_tx_timer_exp++;
1047 sr = ds->ds_txstat.ts_shortretry;
1048 lr = ds->ds_txstat.ts_longretry;
1049 sc->sc_tx_stats.ast_tx_shortretry += sr;
1050 sc->sc_tx_stats.ast_tx_longretry += lr;
/*
 * Transmit a management frame handed down from the host.  The mgmt
 * header (ath_mgt_hdr_t) arrives either in a separate hdr_buf or
 * stripped from the front of 'skb'.  The frame is mapped for DMA,
 * classified into a HAL packet type, sent at the VAP's minimum rate
 * with legacy RTS/CTS protection applied per the 802.11 rules
 * (multicast => NOACK, length over the RTS threshold => RTS), and
 * queued on the dedicated management queue (sc_txq[1]).  On the error
 * paths the skb is returned to HTC via HTC_ReturnBuffers().
 */
1054 ath_tgt_send_mgt(struct ath_softc_tgt *sc,adf_nbuf_t hdr_buf, adf_nbuf_t skb,
1055 HTC_ENDPOINT_ID endpt)
1057 struct ieee80211_node_target *ni;
1058 struct ieee80211vap_target *vap;
1059 struct ath_vap_target *avp;
1060 struct ath_hal *ah = sc->sc_ah;
1061 a_uint8_t rix, txrate, ctsrate, cix = 0xff, *data;
1062 a_uint32_t subtype, flags, ctsduration;
1063 a_int32_t i, iswep, ismcast, hdrlen, pktlen, try0, len;
1064 struct ath_tx_desc *ds=NULL;
1065 struct ath_txq *txq=NULL;
1066 struct ath_tx_buf *bf;
1068 const HAL_RATE_TABLE *rt;
1069 HAL_BOOL shortPreamble;
1070 struct ieee80211_frame *wh;
1071 struct ath_rc_series rcs[4];
1072 HAL_11N_RATE_SERIES series[4];
/* Locate the mgmt header: inline at the head of skb, or in hdr_buf. */
1077 adf_nbuf_peek_header(skb, &data, &len);
1078 adf_nbuf_pull_head(skb, sizeof(ath_mgt_hdr_t));
1080 adf_nbuf_peek_header(hdr_buf, &data, &len);
1083 adf_os_assert(len >= sizeof(ath_mgt_hdr_t));
1085 mh = (ath_mgt_hdr_t *)data;
1086 adf_nbuf_peek_header(skb, &data, &len);
1087 wh = (struct ieee80211_frame *)data;
1089 adf_os_mem_set(rcs, 0, sizeof(struct ath_rc_series)*4);
1090 adf_os_mem_set(series, 0, sizeof(HAL_11N_RATE_SERIES)*4);
1092 bf = asf_tailq_first(&sc->sc_txbuf);
1096 asf_tailq_remove(&sc->sc_txbuf, bf, bf_list);
1098 ni = ath_tgt_find_node(sc, mh->ni_index);
1102 bf->bf_endpt = endpt;
1103 bf->bf_cookie = mh->cookie;
1104 bf->bf_protmode = mh->flags & (IEEE80211_PROT_RTSCTS | IEEE80211_PROT_CTSONLY);
/* Management frames always go out on queue 1. */
1105 txq = &sc->sc_txq[1];
1106 iswep = wh->i_fc[1] & IEEE80211_FC1_WEP;
1107 ismcast = IEEE80211_IS_MULTICAST(wh->i_addr1);
1108 hdrlen = ieee80211_anyhdrsize(wh);
1110 keyix = HAL_TXKEYIX_INVALID;
1111 pktlen -= (hdrlen & 3);
1112 pktlen += IEEE80211_CRC_LEN;
1117 adf_nbuf_map(sc->sc_dev, bf->bf_dmamap, skb, ADF_OS_DMA_TO_DEVICE);
1120 adf_nbuf_queue_add(&bf->bf_skbhead, skb);
1123 rt = sc->sc_currates;
1124 adf_os_assert(rt != NULL);
/* NOTE(review): '==' (not '&') comparison against the flag -- confirm
 * ATH_SHORT_PREAMBLE is the only flag ever set here. */
1126 if (mh->flags == ATH_SHORT_PREAMBLE)
1127 shortPreamble = AH_TRUE;
1129 shortPreamble = AH_FALSE;
1131 flags = HAL_TXDESC_CLRDMASK;
1133 switch (wh->i_fc[0] & IEEE80211_FC0_TYPE_MASK) {
1134 case IEEE80211_FC0_TYPE_MGT:
1135 subtype = wh->i_fc[0] & IEEE80211_FC0_SUBTYPE_MASK;
1137 if (subtype == IEEE80211_FC0_SUBTYPE_PROBE_RESP)
1138 atype = HAL_PKT_TYPE_PROBE_RESP;
1139 else if (subtype == IEEE80211_FC0_SUBTYPE_ATIM)
1140 atype = HAL_PKT_TYPE_ATIM;
1142 atype = HAL_PKT_TYPE_NORMAL;
1146 atype = HAL_PKT_TYPE_NORMAL;
1150 avp = &sc->sc_vap[mh->vap_index];
1152 rcs[0].rix = ath_get_minrateidx(sc, avp);
1153 rcs[0].tries = ATH_TXMAXTRY;
1156 adf_os_mem_copy(bf->bf_rcs, rcs, sizeof(rcs));
1158 try0 = rcs[0].tries;
1159 txrate = rt->info[rix].rateCode;
1162 txrate |= rt->info[rix].shortPreamble;
/* Multicast never waits for an ACK; long unicast frames add RTS. */
1169 flags |= HAL_TXDESC_NOACK;
1171 } else if (pktlen > vap->iv_rtsthreshold) {
1172 flags |= HAL_TXDESC_RTSENA;
1173 cix = rt->info[rix].controlRate;
1176 if ((bf->bf_protmode != IEEE80211_PROT_NONE) &&
1177 rt->info[rix].phy == IEEE80211_T_OFDM &&
1178 (flags & HAL_TXDESC_NOACK) == 0) {
1179 cix = rt->info[sc->sc_protrix].controlRate;
1180 sc->sc_tx_stats.ast_tx_protect++;
/* Management frames use the node's mgmt sequence counter. */
1183 *(a_uint16_t *)&wh->i_seq[0] = adf_os_cpu_to_le16(ni->ni_txseqmgmt <<
1184 IEEE80211_SEQ_SEQ_SHIFT);
1185 INCR(ni->ni_txseqmgmt, IEEE80211_SEQ_MAX);
/* Legacy CTS rate/duration computation when protection is enabled. */
1188 if (flags & (HAL_TXDESC_RTSENA|HAL_TXDESC_CTSENA)) {
1189 adf_os_assert(cix != 0xff);
1190 ctsrate = rt->info[cix].rateCode;
1191 if (shortPreamble) {
1192 ctsrate |= rt->info[cix].shortPreamble;
1193 if (flags & HAL_TXDESC_RTSENA) /* SIFS + CTS */
1194 ctsduration += rt->info[cix].spAckDuration;
1195 if ((flags & HAL_TXDESC_NOACK) == 0) /* SIFS + ACK */
1196 ctsduration += rt->info[cix].spAckDuration;
1198 if (flags & HAL_TXDESC_RTSENA) /* SIFS + CTS */
1199 ctsduration += rt->info[cix].lpAckDuration;
1200 if ((flags & HAL_TXDESC_NOACK) == 0) /* SIFS + ACK */
1201 ctsduration += rt->info[cix].lpAckDuration;
1203 ctsduration += ath_hal_computetxtime(ah,
1204 rt, pktlen, rix, shortPreamble);
1209 flags |= HAL_TXDESC_INTREQ;
1211 ah->ah_setupTxDesc(ds
1222 bf->bf_flags = flags;
1225 * Set key type in tx desc while sending the encrypted challenge to AP
1226 * in Auth frame 3 of Shared Authentication, owl needs this.
1228 if (iswep && (keyix != HAL_TXKEYIX_INVALID) &&
1229 (wh->i_fc[0] & IEEE80211_FC0_SUBTYPE_MASK) == IEEE80211_FC0_SUBTYPE_AUTH)
1230 ah->ah_fillKeyTxDesc(ds, mh->keytype);
1232 ath_filltxdesc(sc, bf);
/* Fixed 4-series scenario: same rate, 2 tries each. */
1234 for (i=0; i<4; i++) {
1235 series[i].Tries = 2;
1236 series[i].Rate = txrate;
1237 series[i].ChSel = sc->sc_ic.ic_tx_chainmask;
1238 series[i].RateFlags = 0;
1240 ah->ah_set11nRateScenario(ds, 0, ctsrate, series, 4, 0);
1241 ath_tgt_txqaddbuf(sc, txq, bf, bf->bf_lastds);
/* Error path: return the skb to the host transport. */
1245 HTC_ReturnBuffers(sc->tgt_htc_handle, endpt, skb);
/*
 * Append a buffer to a hardware queue and (re)start DMA: write the
 * descriptor pointer if the queue was empty, otherwise patch the
 * previous tail's link, then record the new tail link and kick TXE.
 * Unlike ath_tgt_txq_add_ucast() this variant does not re-check the
 * TXE register for the stopped-queue race.
 */
1250 ath_tgt_txqaddbuf(struct ath_softc_tgt *sc,
1251 struct ath_txq *txq, struct ath_tx_buf *bf,
1252 struct ath_tx_desc *lastds)
1254 struct ath_hal *ah = sc->sc_ah;
1256 ATH_TXQ_INSERT_TAIL(txq, bf, bf_list);
1258 if (txq->axq_link == NULL) {
1259 ah->ah_setTxDP(ah, txq->axq_qnum, ATH_BUF_GET_DESC_PHY_ADDR(bf));
1261 *txq->axq_link = ATH_BUF_GET_DESC_PHY_ADDR(bf);
1264 txq->axq_link = &lastds->ds_link;
1265 ah->ah_startTxDma(ah, txq->axq_qnum);
/*
 * Dispatch a non-aggregate frame: install the normal completion
 * handler, advance the TID's window start, and send immediately.
 */
1268 void ath_tgt_handle_normal(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
1271 struct ath_node_target *an;
1273 an = (struct ath_node_target *)bf->bf_node;
1276 tid = &an->tid[bf->bf_tidno];
1279 bf->bf_comp = ath_tgt_tx_comp_normal;
1280 INCR(tid->seq_start, IEEE80211_SEQ_MAX);
1281 ath_tgt_tx_send_normal(sc, bf);
/* Mark a TID as scheduled and append it to its queue's TID list. */
1285 ath_tgt_tx_enqueue(struct ath_txq *txq, struct ath_atx_tid *tid)
1293 tid->sched = AH_TRUE;
1294 asf_tailq_insert_tail(&txq->axq_tidq, tid, tid_qelem);
/*
 * Round-robin TID scheduler for one hardware queue: pop each queued
 * TID, clear its sched flag, service it via the normal or aggregate
 * path depending on TID_AGGR_ENABLED, and re-queue it if frames remain,
 * looping until the TID list empties or the pass completes ('bdone').
 */
1298 ath_tgt_txq_schedule(struct ath_softc_tgt *sc, struct ath_txq *txq)
1300 struct ath_atx_tid *tid;
1306 TAILQ_DEQ(&txq->axq_tidq, tid, tid_qelem);
1311 tid->sched = AH_FALSE;
1316 if (!(tid->flag & TID_AGGR_ENABLED))
1317 ath_tgt_tx_sched_normal(sc,tid);
1319 ath_tgt_tx_sched_aggr(sc,tid);
1323 if (!asf_tailq_empty(&tid->buf_q)) {
1324 ath_tgt_tx_enqueue(txq, tid);
1327 } while (!asf_tailq_empty(&txq->axq_tidq) && !bdone);
/*
 * Dispatch a frame on an aggregation-enabled TID.  The frame is queued
 * on the TID (and the TID scheduled) when the hardware queue is already
 * deep enough, the TID has backlog, is paused, or the frame falls
 * outside the block-ack window; otherwise it is added to the BAW and
 * sent immediately as a singleton.
 */
1331 ath_tgt_handle_aggr(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
1334 struct ath_node_target *an;
1335 struct ath_txq *txq = bf->bf_txq;
1336 a_bool_t queue_frame, within_baw;
1338 an = (struct ath_node_target *)bf->bf_node;
1341 tid = &an->tid[bf->bf_tidno];
1344 bf->bf_comp = ath_tgt_tx_comp_aggr;
1346 within_baw = BAW_WITHIN(tid->seq_start, tid->baw_size,
1347 SEQNO_FROM_BF_SEQNO(bf->bf_seqno));
1349 queue_frame = ( (txq->axq_depth >= ATH_AGGR_MIN_QDEPTH) ||
1350 (!asf_tailq_empty(&tid->buf_q)) ||
1351 (tid->paused) || (!within_baw) );
1354 asf_tailq_insert_tail(&tid->buf_q, bf, bf_list);
1355 ath_tgt_tx_enqueue(txq, tid);
1357 ath_tx_addto_baw(tid, bf);
1358 __stats(sc, txaggr_nframes);
1359 ath_tgt_tx_send_normal(sc, bf);
/*
 * Drain a non-aggregate TID: pop frames off its buffer queue and hand
 * them to ath_tgt_tx_send_normal() until the hardware queue is deep
 * enough (ATH_AGGR_MIN_QDEPTH) or the TID queue empties.
 * Fix: removed a stray second semicolon after the txq initializer and
 * normalized the spacing around '='.
 */
1364 ath_tgt_tx_sched_normal(struct ath_softc_tgt *sc, ath_atx_tid_t *tid)
1366 struct ath_tx_buf *bf;
1367 struct ath_txq *txq = TID_TO_ACTXQ(tid->tidno);
1370 if (asf_tailq_empty(&tid->buf_q))
1373 bf = asf_tailq_first(&tid->buf_q);
1374 asf_tailq_remove(&tid->buf_q, bf, bf_list);
1375 ath_tgt_tx_send_normal(sc, bf);
1377 } while (txq->axq_depth < ATH_AGGR_MIN_QDEPTH);
/*
 * Form and transmit A-MPDUs from a TID's backlog.  Each pass asks
 * ath_tgt_tx_form_aggr() to collect subframes into bf_q; a single-frame
 * result is sent as a plain frame (aggregation bits cleared), while a
 * multi-frame result gets first/last aggregate descriptor markup and a
 * shared rate scenario before being queued.  Looping continues while
 * the hardware queue is shallow and the BAW has not closed.
 */
1381 ath_tgt_tx_sched_aggr(struct ath_softc_tgt *sc, ath_atx_tid_t *tid)
1383 struct ath_tx_buf *bf, *bf_last;
1384 ATH_AGGR_STATUS status;
1385 ath_tx_bufhead bf_q;
1386 struct ath_txq *txq = TID_TO_ACTXQ(tid->tidno);
1387 struct ath_tx_desc *ds = NULL;
1388 struct ath_hal *ah = sc->sc_ah;
1392 if (asf_tailq_empty(&tid->buf_q))
1396 if (asf_tailq_empty(&tid->buf_q))
1399 asf_tailq_init(&bf_q);
1401 status = ath_tgt_tx_form_aggr(sc, tid, &bf_q);
1403 if (asf_tailq_empty(&bf_q))
1406 bf = asf_tailq_first(&bf_q);
1407 bf_last = asf_tailq_last(&bf_q, ath_tx_bufhead_s);
/* Degenerate aggregate of one frame: send it un-aggregated. */
1409 if (bf->bf_nframes == 1) {
1411 if(bf->bf_retries == 0)
1412 __stats(sc, txaggr_single);
1414 bf->bf_lastds = &(bf->bf_descarr[bf->bf_dmamap_info.nsegs -1]);
1415 bf->bf_lastds->ds_link = 0;
1418 for(ds = bf->bf_desc; ds <= bf->bf_lastds; ds++)
1419 ah->ah_clr11nAggr(ds);
1421 ath_buf_set_rate(sc, bf);
1422 bf->bf_txq_add(sc, bf);
/* Real aggregate: terminate the chain and mark first/last subframes. */
1427 bf_last->bf_next = NULL;
1428 bf_last->bf_lastds->ds_link = 0;
1429 bf_last->bf_ndelim = 0;
1432 ath_buf_set_rate(sc, bf);
1433 ah->ah_set11nAggrFirst(bf->bf_desc, bf->bf_al,
1435 bf->bf_lastds = bf_last->bf_lastds;
1437 for (i = 0; i < bf_last->bf_dmamap_info.nsegs; i++)
1438 ah->ah_set11nAggrLast(&bf_last->bf_descarr[i]);
1440 if (status == ATH_AGGR_8K_LIMITED) {
1445 bf->bf_txq_add(sc, bf);
1446 } while (txq->axq_depth < ATH_AGGR_MIN_QDEPTH &&
1447 status != ATH_TGT_AGGR_BAW_CLOSED);
/*
 * ath_lookup_rate: fill bf->bf_rcs from rate control and derive the byte
 * limit for one aggregate.  Multicast frames are pinned to a single fixed
 * rate (rix 0xb) with no fallback series.  The limit starts from the
 * minimum 4ms-frame-length across the tried rate series, then is clamped
 * by ic_ampdu_limit (scaled to 3/8 when coexistence is enabled) and by
 * the peer's advertised ni_maxampdu.
 * NOTE(review): the return statement is elided in this view; presumably
 * aggr_limit (or 0 when prate/legacy forbid aggregation) is returned --
 * confirm against the full file.
 */
1450 static u_int32_t ath_lookup_rate(struct ath_softc_tgt *sc,
1451 struct ath_node_target *an,
1452 struct ath_tx_buf *bf)
1455 u_int32_t max4msframelen, frame_length;
1456 u_int16_t aggr_limit, legacy=0;
1457 const HAL_RATE_TABLE *rt = sc->sc_currates;
1458 struct ieee80211_node_target *ieee_node = (struct ieee80211_node_target *)an;
1460 if (bf->bf_ismcast) {
1461 bf->bf_rcs[1].tries = bf->bf_rcs[2].tries = bf->bf_rcs[3].tries = 0;
1462 bf->bf_rcs[0].rix = 0xb;
1463 bf->bf_rcs[0].tries = ATH_TXMAXTRY - 1;
1464 bf->bf_rcs[0].flags = 0;
1466 ath_tgt_rate_findrate(sc, an, AH_TRUE, 0, ATH_TXMAXTRY-1, 4, 1,
1467 ATH_RC_PROBE_ALLOWED, bf->bf_rcs, &prate);
1470 max4msframelen = IEEE80211_AMPDU_LIMIT_MAX;
/* Take the smallest 4ms frame length over all rate series in use. */
1472 for (i = 0; i < 4; i++) {
1473 if (bf->bf_rcs[i].tries) {
1474 frame_length = bf->bf_rcs[i].max4msframelen;
/* A non-HT (legacy) rate in the series disqualifies aggregation. */
1476 if (rt->info[bf->bf_rcs[i].rix].phy != IEEE80211_T_HT) {
1481 max4msframelen = ATH_MIN(max4msframelen, frame_length);
1485 if (prate || legacy)
/* With BT coexistence active, reserve airtime: use 3/8 of the limit. */
1488 if (sc->sc_ic.ic_enable_coex)
1489 aggr_limit = ATH_MIN((max4msframelen*3)/8, sc->sc_ic.ic_ampdu_limit);
1491 aggr_limit = ATH_MIN(max4msframelen, sc->sc_ic.ic_ampdu_limit);
1493 if (ieee_node->ni_maxampdu)
1494 aggr_limit = ATH_MIN(aggr_limit, ieee_node->ni_maxampdu);
/*
 * ath_tgt_tx_form_aggr: pull frames from tid->buf_q and chain them into
 * one A-MPDU on bf_q.  Stops when the Block Ack window would be exceeded
 * (ATH_TGT_AGGR_BAW_CLOSED), when the byte or subframe-count limits are
 * reached (ATH_TGT_AGGR_LIMITED), or when the software queue empties
 * (ATH_TGT_AGGR_DONE).  On return the first buffer carries the aggregate
 * totals (bf_al = total bytes, bf_nframes = subframe count).
 */
1499 int ath_tgt_tx_form_aggr(struct ath_softc_tgt *sc, ath_atx_tid_t *tid,
1500 ath_tx_bufhead *bf_q)
1502 struct ath_tx_buf *bf_first ,*bf_prev = NULL;
1503 int nframes = 0, rl = 0;;
1504 struct ath_tx_desc *ds = NULL;
1505 struct ath_tx_buf *bf;
1506 struct ath_hal *ah = sc->sc_ah;
1507 u_int16_t aggr_limit = (64*1024 -1), al = 0, bpad = 0, al_delta;
1508 u_int16_t h_baw = tid->baw_size/2, prev_al = 0, prev_frames = 0;
1510 bf_first = asf_tailq_first(&tid->buf_q);
1513 bf = asf_tailq_first(&tid->buf_q);
/* Stop if this subframe's sequence number is outside the BA window. */
1516 if (!BAW_WITHIN(tid->seq_start, tid->baw_size,
1517 SEQNO_FROM_BF_SEQNO(bf->bf_seqno))) {
1519 bf_first->bf_al= al;
1520 bf_first->bf_nframes = nframes;
1521 return ATH_TGT_AGGR_BAW_CLOSED;
/* Rate lookup is done once per aggregate (rl guards the elided branch). */
1525 aggr_limit = ath_lookup_rate(sc, tid->an, bf);
/* Bytes this subframe adds: MPDU delimiter overhead plus payload. */
1529 al_delta = ATH_AGGR_DELIM_SZ + bf->bf_pktlen;
/* Byte-budget check: adding this subframe would exceed the rate limit. */
1531 if (nframes && (aggr_limit < (al + bpad + al_delta + prev_al))) {
1532 bf_first->bf_al= al;
1533 bf_first->bf_nframes = nframes;
1534 return ATH_TGT_AGGR_LIMITED;
/* Subframe-count caps: bounded by half the BAW and fixed ceilings. */
1538 if ((nframes + prev_frames) >= ATH_MIN((h_baw), 17)) {
1540 if ((nframes + prev_frames) >= ATH_MIN((h_baw), 22)) {
1542 bf_first->bf_al= al;
1543 bf_first->bf_nframes = nframes;
1544 return ATH_TGT_AGGR_LIMITED;
/* Commit this subframe: track in BAW and move it onto the aggregate. */
1547 ath_tx_addto_baw(tid, bf);
1548 asf_tailq_remove(&tid->buf_q, bf, bf_list);
1549 asf_tailq_insert_tail(bf_q, bf, bf_list);
1554 adf_os_assert(bf->bf_comp == ath_tgt_tx_comp_aggr);
1556 al += bpad + al_delta;
1557 bf->bf_ndelim = ATH_AGGR_GET_NDELIM(bf->bf_pktlen);
/*
 * Extra delimiters per key type compensate for hardware decryption
 * latency (MAC must not outrun the crypto engine).
 */
1559 switch (bf->bf_keytype) {
1560 case HAL_KEY_TYPE_AES:
1561 bf->bf_ndelim += ATH_AGGR_ENCRYPTDELIM;
1563 case HAL_KEY_TYPE_WEP:
1564 case HAL_KEY_TYPE_TKIP:
1565 bf->bf_ndelim += 64;
1567 case HAL_KEY_TYPE_WAPI:
1568 bf->bf_ndelim += 12;
/* Padding charged to the NEXT subframe: 4-byte align + delimiters. */
1574 bpad = PADBYTES(al_delta) + (bf->bf_ndelim << 2);
/* Link the previous subframe's descriptor chain to this one. */
1577 bf_prev->bf_next = bf;
1578 bf_prev->bf_lastds->ds_link = ATH_BUF_GET_DESC_PHY_ADDR(bf);
1582 for(ds = bf->bf_desc; ds <= bf->bf_lastds; ds++)
1583 ah->ah_set11nAggrMiddle(ds, bf->bf_ndelim);
1585 } while (!asf_tailq_empty(&tid->buf_q));
1587 bf_first->bf_al= al;
1588 bf_first->bf_nframes = nframes;
1590 return ATH_TGT_AGGR_DONE;
/*
 * ath_tx_addto_baw: mark a frame's sequence number as outstanding in the
 * TID's Block Ack window bitmap.  Retried frames are already tracked and
 * are skipped (body of the bf_isretried branch is elided in this view).
 * The tail pointer is advanced when the new entry lands at/after it.
 */
1593 void ath_tx_addto_baw(ath_atx_tid_t *tid, struct ath_tx_buf *bf)
1597 if (bf->bf_isretried) {
1601 index = ATH_BA_INDEX(tid->seq_start, SEQNO_FROM_BF_SEQNO(bf->bf_seqno));
/* cindex: position in the circular bitmap (ATH_TID_MAX_BUFS is pow-2). */
1602 cindex = (tid->baw_head + index) & (ATH_TID_MAX_BUFS - 1);
1604 TX_BUF_BITMAP_SET(tid->tx_buf_bitmap, cindex);
1606 if (index >= ((tid->baw_tail - tid->baw_head) & (ATH_TID_MAX_BUFS - 1))) {
1607 tid->baw_tail = cindex;
1608 INCR(tid->baw_tail, ATH_TID_MAX_BUFS);
/*
 * ath_tgt_tx_comp_aggr: completion handler for an aggregate.  Snapshots
 * the last descriptor's TX status and rate series, then walks the
 * subframe chain against the Block Ack bitmap: acked subframes are
 * released from the BAW and freed, unacked ones are queued for retry
 * (possibly producing a BAR).  Retried subframes are prepended back onto
 * the TID queue and the TID is rescheduled.
 * NOTE(review): txq is used below but never assigned in the visible
 * lines -- presumably set on an elided line; confirm in the full file.
 */
1612 void ath_tgt_tx_comp_aggr(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
1614 struct ath_node_target *an = ATH_NODE_TARGET(bf->bf_node);
1615 ath_atx_tid_t *tid = ATH_AN_2_TID(an, bf->bf_tidno);
1616 struct ath_tx_desc lastds;
1617 struct ath_tx_desc *ds = &lastds;
1618 struct ath_rc_series rcs[4];
1623 int nframes = bf->bf_nframes;
1624 struct ath_tx_buf *bf_next;
1625 ath_tx_bufhead bf_q;
1627 struct ath_tx_buf *bar = NULL;
1628 struct ath_txq *txq;
1632 if (tid->flag & TID_CLEANUP_INPROGRES) {
1633 ath_tx_comp_cleanup(sc, bf);
/* Copy status/rates out of the descriptor before subframes are freed. */
1637 adf_os_mem_copy(ds, bf->bf_lastds, sizeof (struct ath_tx_desc));
1638 adf_os_mem_copy(rcs, bf->bf_rcs, sizeof(rcs));
1640 if (ds->ds_txstat.ts_flags == HAL_TX_SW_FILTERED) {
1645 if (!bf->bf_isaggr) {
1646 ath_tx_comp_unaggr(sc, bf);
1650 __stats(sc, tx_compaggr);
1652 asf_tailq_init(&bf_q);
1654 seq_st = ATH_DS_BA_SEQ(ds);
1655 ba = ATH_DS_BA_BITMAP(ds);
1656 tx_ok = (ATH_DS_TX_STATUS(ds) == HAL_OK);
/* Whole-aggregate excessive-retry failure: retry every subframe. */
1658 if (ATH_DS_TX_STATUS(ds) & HAL_TXERR_XRETRY) {
1659 ath_tx_comp_aggr_error(sc, bf, tid);
/* Hardware reported OK but no Block Ack arrived -- known anomaly. */
1663 if (tx_ok && !ATH_DS_TX_BA(ds)) {
1664 __stats(sc, txaggr_babug);
1665 adf_os_print("BA Bug?\n");
1666 ath_tx_comp_aggr_error(sc, bf, tid);
/* Per-subframe disposition based on the BA bitmap. */
1671 ba_index = ATH_BA_INDEX(seq_st, SEQNO_FROM_BF_SEQNO(bf->bf_seqno));
1672 bf_next = bf->bf_next;
1674 if (tx_ok && ATH_BA_ISSET(ba, ba_index)) {
1675 __stats(sc, txaggr_compgood);
1676 ath_tx_update_baw(tid, SEQNO_FROM_BF_SEQNO(bf->bf_seqno));
1677 ath_tx_status_update_aggr(sc, bf, ds, rcs, 1);
1678 ath_tx_freebuf(sc, bf);
1680 ath_tx_retry_subframe(sc, bf, &bf_q, &bar);
1686 ath_update_aggr_stats(sc, ds, nframes, nbad);
1687 ath_rate_tx_complete(sc, an, ds, rcs, nframes, nbad);
1690 ath_bar_tx(sc, tid, bar);
/* Push retried subframes back to the head of the TID queue. */
1693 if (!asf_tailq_empty(&bf_q)) {
1694 __stats(sc, txaggr_prepends);
1695 TAILQ_INSERTQ_HEAD(&tid->buf_q, &bf_q, bf_list);
1696 ath_tgt_tx_enqueue(txq, tid);
/*
 * ath_tx_comp_aggr_error: handle an aggregate whose transmission failed
 * outright (excessive retries or missing Block Ack).  Every subframe is
 * routed through ath_tx_retry_subframe(); all frames count as bad for
 * rate control.  Retried subframes are prepended back onto the TID queue
 * and the TID is rescheduled.
 * NOTE(review): txq is used below but never assigned in the visible
 * lines -- presumably set on an elided line; confirm in the full file.
 */
1701 ath_tx_comp_aggr_error(struct ath_softc_tgt *sc, struct ath_tx_buf *bf,
1706 struct ath_tx_desc lastds;
1707 struct ath_tx_desc *ds = &lastds;
1708 struct ath_rc_series rcs[4];
1709 struct ath_tx_buf *bar = NULL;
1710 struct ath_tx_buf *bf_next;
1711 int nframes = bf->bf_nframes;
1712 ath_tx_bufhead bf_q;
1713 struct ath_txq *txq;
1715 asf_tailq_init(&bf_q);
/* Snapshot status/rates before subframes may be freed during retry. */
1718 adf_os_mem_copy(ds, bf->bf_lastds, sizeof (struct ath_tx_desc));
1719 adf_os_mem_copy(rcs, bf->bf_rcs, sizeof(rcs));
1722 bf_next = bf->bf_next;
1723 ath_tx_retry_subframe(sc, bf, &bf_q, &bar);
/* All nframes subframes reported as bad to the rate-control module. */
1727 ath_update_aggr_stats(sc, ds, nframes, nframes);
1728 ath_rate_tx_complete(sc, tid->an, ds, rcs, nframes, nframes);
1731 ath_bar_tx(sc, tid, bar);
1734 if (!asf_tailq_empty(&bf_q)) {
1735 __stats(sc, txaggr_prepends);
1736 TAILQ_INSERTQ_HEAD(&tid->buf_q, &bf_q, bf_list);
1737 ath_tgt_tx_enqueue(txq, tid);
/*
 * ath_tx_comp_cleanup: completion path used while a TID is being torn
 * down (TID_CLEANUP_INPROGRES).  Frames are not retried; each subframe's
 * status is updated and the buffer freed.  When the cleanup of the BAW
 * finishes, the in-progress flag is cleared and the TID resumed.
 */
1742 ath_tx_comp_cleanup(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
1745 struct ath_node_target *an = ATH_NODE_TARGET(bf->bf_node);
1746 ath_atx_tid_t *tid = ATH_AN_2_TID(an, bf->bf_tidno);
1747 struct ath_tx_desc lastds;
1748 struct ath_tx_desc *ds = &lastds;
1749 struct ath_rc_series rcs[4];
1751 int nframes = bf->bf_nframes;
1752 struct ath_tx_buf *bf_next;
1754 adf_os_mem_copy(ds, bf->bf_lastds, sizeof (struct ath_tx_desc));
1755 adf_os_mem_copy(rcs, bf->bf_rcs, sizeof(rcs));
/* Single-frame completion: account, report, free, advance cleanup. */
1757 if (!bf->bf_isaggr) {
1758 ath_update_stats(sc, bf);
1760 __stats(sc, tx_compunaggr);
1762 ath_tx_status_update(sc, bf);
1764 ath_tx_freebuf(sc, bf);
1766 if (tid->flag & TID_CLEANUP_INPROGRES) {
1767 owl_tgt_tid_cleanup(sc, tid);
/* Aggregate completion: walk and free every subframe, no retries. */
1775 bf_next = bf->bf_next;
1777 ath_tx_status_update_aggr(sc, bf, ds, rcs, 0);
1779 ath_tx_freebuf(sc, bf);
/* BAW drained: cleanup done, let the TID transmit again. */
1783 tid->flag &= ~TID_CLEANUP_INPROGRES;
1784 ath_aggr_resume_tid(sc, tid);
1791 ath_update_aggr_stats(sc, ds, nframes, nbad);
1792 ath_rate_tx_complete(sc, an, ds, rcs, nframes, nbad);
/*
 * ath_tx_retry_subframe: prepare one unacked subframe for retry.  Clears
 * the 11n aggregation fields on all of its descriptors; if the retry cap
 * (OWLMAX_RETRIES) is reached the frame is dropped -- BAW updated, status
 * reported, buffer freed (the *bar assignment is presumably on an elided
 * line; confirm).  Otherwise the frame is marked retried and appended to
 * bf_q for re-queuing onto the TID.
 */
1796 ath_tx_retry_subframe(struct ath_softc_tgt *sc, struct ath_tx_buf *bf,
1797 ath_tx_bufhead *bf_q, struct ath_tx_buf **bar)
1800 struct ath_node_target *an = ATH_NODE_TARGET(bf->bf_node);
1801 ath_atx_tid_t *tid = ATH_AN_2_TID(an, bf->bf_tidno);
1802 struct ath_tx_desc *ds = NULL;
1803 struct ath_hal *ah = sc->sc_ah;
1806 __stats(sc, txaggr_compretries);
/* Reset per-descriptor aggregation state before the standalone retry. */
1808 for(ds = bf->bf_desc, i = 0; i < bf->bf_dmamap_info.nsegs; ds++, i++) {
1809 ah->ah_clr11nAggr(ds);
1810 ah->ah_set11nBurstDuration(ds, 0);
1811 ah->ah_set11nVirtualMoreFrag(ds, 0);
/* Retry budget exhausted: give up on this subframe. */
1814 if (bf->bf_retries >= OWLMAX_RETRIES) {
1815 __stats(sc, txaggr_xretries);
1816 ath_tx_update_baw(tid, SEQNO_FROM_BF_SEQNO(bf->bf_seqno));
1817 ath_tx_status_update_aggr(sc, bf, bf->bf_lastds, NULL, 0);
1822 ath_tx_freebuf(sc, bf);
/* Last-descriptor anomaly: switch to the alternate buffer copy. */
1827 __stats(sc, txaggr_errlast);
1828 bf = ath_buf_toggle(sc, bf, 1);
1830 bf->bf_lastds = &(bf->bf_descarr[bf->bf_dmamap_info.nsegs - 1]);
1832 ath_tx_set_retry(sc, bf);
1833 asf_tailq_insert_tail(bf_q, bf, bf_list);
/*
 * ath_update_aggr_stats: bump the per-condition aggregate TX counters
 * from the completed descriptor's status and flag bits.  Pure statistics
 * accounting -- no transmit state is modified.
 */
1837 ath_update_aggr_stats(struct ath_softc_tgt *sc,
1838 struct ath_tx_desc *ds, int nframes,
1842 u_int8_t status = ATH_DS_TX_STATUS(ds);
1843 u_int8_t txflags = ATH_DS_TX_FLAGS(ds);
1845 __statsn(sc, txaggr_longretries, ds->ds_txstat.ts_longretry);
1846 __statsn(sc, txaggr_shortretries, ds->ds_txstat.ts_shortretry);
1848 if (txflags & HAL_TX_DESC_CFG_ERR)
1849 __stats(sc, txaggr_desc_cfgerr);
1851 if (txflags & HAL_TX_DATA_UNDERRUN)
1852 __stats(sc, txaggr_data_urun);
1854 if (txflags & HAL_TX_DELIM_UNDERRUN)
1855 __stats(sc, txaggr_delim_urun);
1861 if (status & HAL_TXERR_XRETRY)
1862 __stats(sc, txaggr_compxretry);
1864 if (status & HAL_TXERR_FILT)
1865 __stats(sc, txaggr_filtered);
1867 if (status & HAL_TXERR_FIFO)
1868 __stats(sc, txaggr_fifo);
1870 if (status & HAL_TXERR_XTXOP)
1871 __stats(sc, txaggr_xtxop);
1873 if (status & HAL_TXERR_TIMER_EXPIRED)
1874 __stats(sc, txaggr_timer_exp);
/*
 * ath_tx_comp_unaggr: completion handler for a single (non-aggregated)
 * frame on an aggregation TID.  Reports to rate control; on excessive
 * retries the frame is handed to the unaggregated retry path, otherwise
 * its sequence number is released from the BAW and the buffer freed.
 */
1878 ath_tx_comp_unaggr(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
1880 struct ath_node_target *an = ATH_NODE_TARGET(bf->bf_node);
1881 ath_atx_tid_t *tid = ATH_AN_2_TID(an, bf->bf_tidno);
1882 struct ath_tx_desc *ds = bf->bf_lastds;
1884 ath_update_stats(sc, bf);
1885 ath_rate_tx_complete(sc, an, ds, bf->bf_rcs, 1, 0);
1887 if (ATH_DS_TX_STATUS(ds) & HAL_TXERR_XRETRY) {
1888 ath_tx_retry_unaggr(sc, bf);
1891 __stats(sc, tx_compunaggr);
1893 ath_tx_update_baw(tid, SEQNO_FROM_BF_SEQNO(bf->bf_seqno));
1894 ath_tx_status_update(sc, bf);
1895 ath_tx_freebuf(sc, bf);
/*
 * ath_tx_retry_unaggr: retry path for a failed single frame.  Past the
 * retry cap (OWLMAX_RETRIES), the frame is abandoned: BAW updated, status
 * reported, and a BAR is transmitted to resynchronize the peer's window.
 * Otherwise the frame is marked retried and pushed to the head of the
 * TID's software queue for retransmission.
 * NOTE(review): txq is used below but never assigned in the visible
 * lines -- presumably set on an elided line; confirm in the full file.
 */
1899 ath_tx_retry_unaggr(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
1901 struct ath_node_target *an = ATH_NODE_TARGET(bf->bf_node);
1902 ath_atx_tid_t *tid = ATH_AN_2_TID(an, bf->bf_tidno);
1903 struct ath_txq *txq;
1907 if (bf->bf_retries >= OWLMAX_RETRIES) {
1908 __stats(sc, txunaggr_xretry);
1909 ath_tx_update_baw(tid, SEQNO_FROM_BF_SEQNO(bf->bf_seqno));
1910 ath_tx_status_update(sc, bf);
1911 ath_bar_tx(sc, tid, bf);
1915 __stats(sc, txunaggr_compretries);
/* Last-descriptor anomaly: switch to the alternate buffer copy. */
1916 if (!bf->bf_lastds->ds_link) {
1917 __stats(sc, txunaggr_errlast);
1918 bf = ath_buf_toggle(sc, bf, 1);
1921 ath_tx_set_retry(sc, bf);
1922 asf_tailq_insert_head(&tid->buf_q, bf, bf_list);
1923 ath_tgt_tx_enqueue(txq, tid);
/*
 * ath_tx_update_baw: release a completed (or abandoned) sequence number
 * from the Block Ack window.  Clears the frame's bit in the circular
 * bitmap, then slides the window start forward over every contiguous
 * already-completed slot at the head.
 */
1927 ath_tx_update_baw(ath_atx_tid_t *tid, int seqno)
1932 index = ATH_BA_INDEX(tid->seq_start, seqno);
1933 cindex = (tid->baw_head + index) & (ATH_TID_MAX_BUFS - 1);
1935 TX_BUF_BITMAP_CLR(tid->tx_buf_bitmap, cindex);
1937 while (tid->baw_head != tid->baw_tail &&
1938 (!TX_BUF_BITMAP_IS_SET(tid->tx_buf_bitmap, tid->baw_head))) {
1939 INCR(tid->seq_start, IEEE80211_SEQ_MAX);
1940 INCR(tid->baw_head, ATH_TID_MAX_BUFS);
/*
 * ath_tx_set_retry: mark a buffer as a retransmission -- flags the buffer
 * and sets the 802.11 Retry bit in the frame header's frame-control field.
 */
1944 static void ath_tx_set_retry(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
1946 struct ieee80211_frame *wh;
1948 __stats(sc, txaggr_retries);
1950 bf->bf_isretried = 1;
1952 wh = ATH_SKB_2_WH(bf->bf_skb);
1953 wh->i_fc[1] |= IEEE80211_FC1_RETRY;
/*
 * ath_tgt_tx_cleanup: tear down aggregation state for a TID (e.g. on
 * DELBA).  Walks the software queue, dropping retried frames (all frames
 * when discard_all is set) and reverting survivors to the normal
 * completion handler.  Then pauses the TID and scans the BAW: any slot
 * still outstanding in hardware marks cleanup as in-progress (finished
 * later from the completion path); if nothing is outstanding the TID is
 * resumed immediately.
 */
1956 void ath_tgt_tx_cleanup(struct ath_softc_tgt *sc, struct ath_node_target *an,
1957 ath_atx_tid_t *tid, a_uint8_t discard_all)
1959 struct ath_tx_buf *bf;
1960 struct ath_tx_buf *bf_next;
1962 bf = asf_tailq_first(&tid->buf_q);
1965 if (discard_all || bf->bf_isretried) {
1966 bf_next = asf_tailq_next(bf, bf_list);
1967 TAILQ_DEQ(&tid->buf_q, bf, bf_list);
/* Retried frames hold BAW slots -- release before freeing. */
1968 if (bf->bf_isretried)
1969 ath_tx_update_baw(tid, SEQNO_FROM_BF_SEQNO(bf->bf_seqno));
1970 ath_tx_freebuf(sc, bf);
/* Kept frames complete via the non-aggregate path from now on. */
1974 bf->bf_comp = ath_tgt_tx_comp_normal;
1975 bf = asf_tailq_next(bf, bf_list);
1978 ath_aggr_pause_tid(sc, tid);
1980 while (tid->baw_head != tid->baw_tail) {
1981 if (TX_BUF_BITMAP_IS_SET(tid->tx_buf_bitmap, tid->baw_head)) {
1983 tid->flag |= TID_CLEANUP_INPROGRES;
1984 TX_BUF_BITMAP_CLR(tid->tx_buf_bitmap, tid->baw_head);
1986 INCR(tid->baw_head, ATH_TID_MAX_BUFS);
1987 INCR(tid->seq_start, IEEE80211_SEQ_MAX);
1990 if (!(tid->flag & TID_CLEANUP_INPROGRES)) {
1991 ath_aggr_resume_tid(sc, tid);
1995 /******************/
1996 /* BAR Management */
1997 /******************/
/*
 * ath_tgt_delba_send: tear down the Block Ack session for a TID and
 * notify the host (via WMI) to send a DELBA to the peer.
 * NOTE(review): the initiator and reasoncode PARAMETERS ARE IGNORED --
 * the WMI message is hard-coded to initiator=1 and
 * IEEE80211_REASON_UNSPECIFIED.  Likely a latent bug; confirm callers
 * before relying on those arguments.
 */
1999 static void ath_tgt_delba_send(struct ath_softc_tgt *sc,
2000 struct ieee80211_node_target *ni,
2001 a_uint8_t tidno, a_uint8_t initiator,
2002 a_uint16_t reasoncode)
2004 struct ath_node_target *an = ATH_NODE_TARGET(ni);
2005 ath_atx_tid_t *tid = ATH_AN_2_TID(an, tidno);
2006 struct wmi_data_delba wmi_delba;
2008 tid->flag &= ~TID_AGGR_ENABLED;
/* Discard everything buffered on the TID before announcing DELBA. */
2010 ath_tgt_tx_cleanup(sc, an, tid, 1);
2012 wmi_delba.ni_nodeindex = ni->ni_nodeindex;
2013 wmi_delba.tidno = tid->tidno;
2014 wmi_delba.initiator = 1;
2015 wmi_delba.reasoncode = IEEE80211_REASON_UNSPECIFIED;
2017 __stats(sc, txbar_xretry);
2018 wmi_event(sc->tgt_wmi_handle,
/*
 * ath_bar_retry: retry a failed BAR (Block Ack Request) frame.  After
 * OWLMAX_BAR_RETRIES the BA session is torn down via DELBA, the TID is
 * drained, and the buffer completed.  Otherwise the BAR is marked
 * retried and re-queued directly as a unicast frame.
 */
2024 static void ath_bar_retry(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
2026 struct ath_node_target *an = ATH_NODE_TARGET(bf->bf_node);
2027 ath_atx_tid_t *tid = ATH_AN_2_TID(an, bf->bf_tidno);
2029 if (bf->bf_retries >= OWLMAX_BAR_RETRIES) {
2030 ath_tgt_delba_send(sc, bf->bf_node, tid->tidno, 1,
2031 IEEE80211_REASON_UNSPECIFIED);
2032 ath_tgt_tid_drain(sc, tid);
2035 ath_buf_comp(sc, bf);
2039 __stats(sc, txbar_compretries);
/* Last-descriptor anomaly: switch to the alternate buffer copy. */
2041 if (!bf->bf_lastds->ds_link) {
2042 __stats(sc, txbar_errlast);
2043 bf = ath_buf_toggle(sc, bf, 1);
2046 bf->bf_lastds->ds_link = 0;
2048 ath_tx_set_retry(sc, bf);
2049 ath_tgt_txq_add_ucast(sc, bf);
/*
 * ath_bar_tx_comp: completion handler for a transmitted BAR frame.
 * On excessive retries the BAR is retried via ath_bar_retry(); on
 * success the TID (paused when the BAR was sent) is resumed and the
 * buffer completed.
 */
2052 static void ath_bar_tx_comp(struct ath_softc_tgt *sc, struct ath_tx_buf *bf)
2054 struct ath_tx_desc *ds = bf->bf_lastds;
2055 struct ath_node_target *an;
2058 an = (struct ath_node_target *)bf->bf_node;
2059 tid = &an->tid[bf->bf_tidno];
2061 if (ATH_DS_TX_STATUS(ds) & HAL_TXERR_XRETRY) {
2062 ath_bar_retry(sc, bf);
2066 ath_aggr_resume_tid(sc, tid);
2069 ath_buf_comp(sc, bf);
/*
 * ath_bar_tx: build and transmit a BAR (Block Ack Request) frame to
 * resynchronize the peer's Block Ack window with tid->seq_start after
 * subframes were abandoned.  The supplied buffer's skb is rewritten
 * in place into an ieee80211_frame_bar; the TID is paused until the
 * BAR completes (see ath_bar_tx_comp).
 */
2072 static void ath_bar_tx(struct ath_softc_tgt *sc,
2073 ath_atx_tid_t *tid, struct ath_tx_buf *bf)
2076 struct ieee80211_frame_bar *bar;
2078 struct ath_tx_desc *ds, *ds0;
2079 struct ath_hal *ah = sc->sc_ah;
2080 HAL_11N_RATE_SERIES series[4];
2085 __stats(sc, tx_bars);
2087 adf_os_mem_set(&series, 0, sizeof(series));
/* Block further TX on this TID until the BAR is acknowledged. */
2089 ath_aggr_pause_tid(sc, tid);
/* Reuse the buffer's skb: strip old payload, rebuild as a BAR. */
2091 skb = adf_nbuf_queue_remove(&bf->bf_skbhead);
2092 adf_nbuf_peek_header(skb, &anbdata, &anblen);
2093 adf_nbuf_trim_tail(skb, anblen);
2094 bar = (struct ieee80211_frame_bar *) anbdata;
2098 ath_dma_unmap(sc, bf);
2099 adf_nbuf_queue_add(&bf->bf_skbhead, skb);
2101 bar->i_fc[1] = IEEE80211_FC1_DIR_NODS;
2102 bar->i_fc[0] = IEEE80211_FC0_VERSION_0 |
2103 IEEE80211_FC0_TYPE_CTL |
2104 IEEE80211_FC0_SUBTYPE_BAR;
2105 bar->i_ctl = tid->tidno << IEEE80211_BAR_CTL_TID_S |
2106 IEEE80211_BAR_CTL_COMBA;
/* Starting sequence = current BAW start for this TID. */
2107 bar->i_seq = adf_os_cpu_to_le16(tid->seq_start << IEEE80211_SEQ_SEQ_SHIFT);
2109 bf->bf_seqno = tid->seq_start << IEEE80211_SEQ_SEQ_SHIFT;
2111 adf_nbuf_put_tail(skb, sizeof(struct ieee80211_frame_bar));
2113 bf->bf_comp = ath_bar_tx_comp;
2114 bf->bf_tidno = tid->tidno;
2115 bf->bf_node = &tid->an->ni;
2116 ath_dma_map(sc, bf);
2117 adf_nbuf_dmamap_info(bf->bf_dmamap, &bf->bf_dmamap_info);
/* NOTE(review): several setupTxDesc arguments are elided in this view. */
2120 ah->ah_setupTxDesc(ds
2121 , adf_nbuf_len(skb) + IEEE80211_CRC_LEN
2123 , HAL_PKT_TYPE_NORMAL
2129 | HAL_TXDESC_CLRDMASK
2135 for (ds0 = ds, i=0; i < bf->bf_dmamap_info.nsegs; ds0++, i++) {
2136 ah->ah_clr11nAggr(ds0);
2139 ath_filltxdesc(sc, bf);
/* Send the BAR at the minimum rate on every series for robustness. */
2141 for (i = 0 ; i < 4; i++) {
2142 series[i].Tries = ATH_TXMAXTRY;
2143 series[i].Rate = min_rate;
2144 series[i].ChSel = sc->sc_ic.ic_tx_chainmask;
2147 ah->ah_set11nRateScenario(bf->bf_desc, 0, 0, series, 4, 4);
2148 ath_tgt_txq_add_ucast(sc, bf);